chore(core): Remove mysql and mariadb specific code (#24908)

Co-authored-by: Danny Martini <danny@n8n.io>
Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
Tomi Turtiainen
2026-01-29 17:57:33 +02:00
committed by GitHub
parent f3e2930f0e
commit 39df0d5ff1
121 changed files with 132 additions and 3056 deletions

View File

@@ -76,8 +76,6 @@ body:
options:
- SQLite (default)
- PostgreSQL
- MySQL
- MariaDB
default: 0
validations:
required: true

View File

@@ -173,7 +173,7 @@ These only run if specific files changed:
| Files Changed | Workflow | Branch |
|------------------------------------------------------------------------|-----------------------------|------------|
| `packages/@n8n/task-runner-python/**` | `ci-python.yml` | any |
| `packages/cli/src/databases/**`, `*.entity.ts`, `*.repository.ts` | `test-db-postgres-mysql.yml`| any |
| `packages/cli/src/databases/**`, `*.entity.ts`, `*.repository.ts` | `test-db.yml` | any |
| `packages/frontend/@n8n/storybook/**`, design-system, chat | `test-visual-storybook.yml` | master |
| `docker/images/n8n-base/Dockerfile` | `build-base-image.yml` | any |
| `**/package.json`, `**/turbo.json` | `build-windows.yml` | master |
@@ -367,7 +367,7 @@ Push to master/1.x
| Schedule (UTC) | Workflow | Purpose |
|---------------------------|-----------------------------------|--------------------------|
| Daily 00:00 | `docker-build-push.yml` | Nightly Docker images |
| Daily 00:00 | `test-db-postgres-mysql.yml` | Database compatibility |
| Daily 00:00 | `test-db.yml` | Database compatibility |
| Daily 00:00 | `test-e2e-performance-reusable.yml`| Performance E2E |
| Daily 00:00 | `test-visual-storybook.yml` | Storybook deploy |
| Daily 00:00 | `test-visual-chromatic.yml` | Visual regression |

View File

@@ -1,25 +1,4 @@
services:
mariadb:
image: mariadb:10.5
environment:
- MARIADB_DATABASE=n8n
- MARIADB_ROOT_PASSWORD=password
- MARIADB_MYSQL_LOCALHOST_USER=true
ports:
- 3306:3306
tmpfs:
- /var/lib/mysql
mysql-8.4:
image: mysql:8.4
environment:
- MYSQL_DATABASE=n8n
- MYSQL_ROOT_PASSWORD=password
ports:
- 3306:3306
tmpfs:
- /var/lib/mysql
postgres:
image: postgres:16
restart: always

View File

@@ -1,8 +1,8 @@
name: 'Test: DB Postgres MySQL'
name: "Test: DB Postgres"
on:
schedule:
- cron: '0 0 * * *'
- cron: "0 0 * * *"
workflow_dispatch:
pull_request:
paths:
@@ -14,7 +14,7 @@ on:
- packages/cli/test/shared/db/**
- packages/@n8n/db/**
- packages/cli/**/__tests__/**
- .github/workflows/test-db-postgres-mysql.yml
- .github/workflows/test-db.yml
- .github/docker-compose.yml
concurrency:
@@ -22,7 +22,7 @@ concurrency:
cancel-in-progress: true
env:
NODE_OPTIONS: '--max-old-space-size=3072'
NODE_OPTIONS: "--max-old-space-size=3072"
jobs:
build:
@@ -53,65 +53,6 @@ jobs:
working-directory: packages/cli
run: pnpm test:sqlite
mariadb:
name: MariaDB
needs: build
runs-on: blacksmith-4vcpu-ubuntu-2204
timeout-minutes: 30
if: false
env:
DB_MYSQLDB_PASSWORD: password
DB_MYSQLDB_POOL_SIZE: 1
DB_MYSQLDB_CONNECTION_TIMEOUT: 120000
DB_MYSQLDB_ACQUIRE_TIMEOUT: 120000
DB_MYSQLDB_TIMEOUT: 120000
NODE_OPTIONS: '--max-old-space-size=7168'
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: ./.github/actions/setup-nodejs
- name: Start MariaDB
uses: isbang/compose-action@802a148945af6399a338c7906c267331b39a71af # v2.0.0
with:
compose-file: ./.github/docker-compose.yml
services: |
mariadb
- name: Test MariaDB
working-directory: packages/cli
run: pnpm test:mariadb --testTimeout 120000
mysql:
name: MySQL 8.4
needs: build
runs-on: blacksmith-2vcpu-ubuntu-2204
timeout-minutes: 20
if: false
env:
DB_MYSQLDB_PASSWORD: password
DB_MYSQLDB_POOL_SIZE: 1
DB_MYSQLDB_CONNECTION_TIMEOUT: 120000
DB_MYSQLDB_ACQUIRE_TIMEOUT: 120000
DB_MYSQLDB_TIMEOUT: 120000
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- name: Setup and Build
uses: ./.github/actions/setup-nodejs
- name: Start MySQL
uses: isbang/compose-action@802a148945af6399a338c7906c267331b39a71af # v2.0.0
with:
compose-file: ./.github/docker-compose.yml
services: mysql-8.4
- name: Test MySQL
working-directory: packages/cli
# We sleep here due to flakiness with DB tests if we connect to the database too soon
run: sleep 2s && pnpm test:mysql --testTimeout 120000
postgres:
name: Postgres
needs: build
@@ -142,6 +83,6 @@ jobs:
if: failure() && github.ref == 'refs/heads/master'
with:
status: ${{ job.status }}
channel: '#alerts-build'
channel: "#alerts-build"
webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }}
message: Postgres, MariaDB or MySQL tests failed (${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
message: Postgres or SQLite tests failed (${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})

View File

@@ -64,7 +64,7 @@ export interface FrontendSettings {
settingsMode?: 'public' | 'authenticated';
inE2ETests: boolean;
isDocker: boolean;
databaseType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';
databaseType: 'sqlite' | 'postgresdb';
endpointForm: string;
endpointFormTest: string;
endpointFormWaiting: string;

View File

@@ -17,7 +17,6 @@ export interface TestMigrationContext {
queryRunner: QueryRunner;
tablePrefix: string;
dbType: DatabaseType;
isMysql: boolean;
isSqlite: boolean;
isPostgres: boolean;
escape: {
@@ -41,7 +40,6 @@ export function createTestMigrationContext(dataSource: DataSource): TestMigratio
queryRunner,
tablePrefix,
dbType,
isMysql: ['mariadb', 'mysqldb'].includes(dbType),
isSqlite: dbType === 'sqlite',
isPostgres: dbType === 'postgresdb',
escape: {

View File

@@ -4,6 +4,7 @@ import { AuthRolesService, DbConnection, DbConnectionOptions } from '@n8n/db';
import { Container } from '@n8n/di';
import type { DataSourceOptions } from '@n8n/typeorm';
import { DataSource as Connection } from '@n8n/typeorm';
import assert from 'assert';
import { randomString } from 'n8n-workflow';
export const testDbPrefix = 'n8n_test_';
@@ -12,15 +13,16 @@ let isInitialized = false;
/**
* Generate options for a bootstrap DB connection, to create and drop test databases.
*/
export const getBootstrapDBOptions = (dbType: 'postgresdb' | 'mysqldb'): DataSourceOptions => {
export const getBootstrapDBOptions = (): DataSourceOptions => {
const globalConfig = Container.get(GlobalConfig);
const type = dbType === 'postgresdb' ? 'postgres' : 'mysql';
assert(globalConfig.database.type === 'postgresdb', 'Database type must be postgresdb');
return {
type,
...Container.get(DbConnectionOptions).getOverrides(dbType),
database: type,
type: 'postgres',
...Container.get(DbConnectionOptions).getPostgresOverrides(),
database: globalConfig.database.postgresdb.database,
entityPrefix: globalConfig.database.tablePrefix,
schema: dbType === 'postgresdb' ? globalConfig.database.postgresdb.schema : undefined,
schema: globalConfig.database.postgresdb.schema,
};
};
@@ -35,19 +37,11 @@ export async function init() {
const testDbName = `${testDbPrefix}${randomString(6, 10).toLowerCase()}_${Date.now()}`;
if (dbType === 'postgresdb') {
const bootstrapPostgres = await new Connection(
getBootstrapDBOptions('postgresdb'),
).initialize();
const bootstrapPostgres = await new Connection(getBootstrapDBOptions()).initialize();
await bootstrapPostgres.query(`CREATE DATABASE ${testDbName}`);
await bootstrapPostgres.destroy();
globalConfig.database.postgresdb.database = testDbName;
} else if (dbType === 'mysqldb' || dbType === 'mariadb') {
const bootstrapMysql = await new Connection(getBootstrapDBOptions('mysqldb')).initialize();
await bootstrapMysql.query(`CREATE DATABASE ${testDbName} DEFAULT CHARACTER SET utf8mb4`);
await bootstrapMysql.destroy();
globalConfig.database.mysqldb.database = testDbName;
}
const dbConnection = Container.get(DbConnection);
@@ -98,44 +92,31 @@ type EntityName =
*/
export async function truncate(entities: EntityName[]) {
const connection = Container.get(Connection);
const dbType = connection.options.type;
// Disable FK checks for MySQL/MariaDB to handle circular dependencies
if (dbType === 'mysql' || dbType === 'mariadb') {
await connection.query('SET FOREIGN_KEY_CHECKS=0');
// Collect junction tables to clean
const junctionTablesToClean = new Set<string>();
// Find all junction tables associated with the entities being truncated
for (const name of entities) {
try {
const metadata = connection.getMetadata(name);
for (const relation of metadata.manyToManyRelations) {
if (relation.junctionEntityMetadata) {
const junctionTableName = relation.junctionEntityMetadata.tablePath;
junctionTablesToClean.add(junctionTableName);
}
}
} catch (error) {
// Skip
}
}
try {
// Collect junction tables to clean
const junctionTablesToClean = new Set<string>();
// Clean junction tables first (since they reference the entities)
for (const tableName of junctionTablesToClean) {
await connection.query(`DELETE FROM ${tableName}`);
}
// Find all junction tables associated with the entities being truncated
for (const name of entities) {
try {
const metadata = connection.getMetadata(name);
for (const relation of metadata.manyToManyRelations) {
if (relation.junctionEntityMetadata) {
const junctionTableName = relation.junctionEntityMetadata.tablePath;
junctionTablesToClean.add(junctionTableName);
}
}
} catch (error) {
// Skip
}
}
// Clean junction tables first (since they reference the entities)
for (const tableName of junctionTablesToClean) {
await connection.query(`DELETE FROM ${tableName}`);
}
for (const name of entities) {
await connection.getRepository(name).delete({});
}
} finally {
// Re-enable FK checks
if (dbType === 'mysql' || dbType === 'mariadb') {
await connection.query('SET FOREIGN_KEY_CHECKS=1');
}
for (const name of entities) {
await connection.getRepository(name).delete({});
}
}

View File

@@ -5,17 +5,6 @@ import { Config, Env, Nested } from '../decorators';
const dbLoggingOptionsSchema = z.enum(['query', 'error', 'schema', 'warn', 'info', 'log', 'all']);
type DbLoggingOptions = z.infer<typeof dbLoggingOptionsSchema>;
class MySqlMariaDbNotSupportedError extends Error {
// Workaround to not get this reported to Sentry
readonly cause: { level: 'warning' } = {
level: 'warning',
};
constructor() {
super('MySQL and MariaDB have been removed. Please migrate to PostgreSQL.');
}
}
@Config
class LoggingConfig {
/** Whether database logging is enabled. */
@@ -107,33 +96,6 @@ class PostgresConfig {
ssl: PostgresSSLConfig;
}
@Config
class MysqlConfig {
/** @deprecated MySQL database name */
@Env('DB_MYSQLDB_DATABASE')
database: string = 'n8n';
/** MySQL database host */
@Env('DB_MYSQLDB_HOST')
host: string = 'localhost';
/** MySQL database password */
@Env('DB_MYSQLDB_PASSWORD')
password: string = '';
/** MySQL database port */
@Env('DB_MYSQLDB_PORT')
port: number = 3306;
/** MySQL database user */
@Env('DB_MYSQLDB_USER')
user: string = 'root';
/** MySQL connection pool size */
@Env('DB_MYSQLDB_POOL_SIZE')
poolSize: number = 10;
}
const sqlitePoolSizeSchema = z.coerce.number().int().gte(1);
@Config
@@ -155,7 +117,7 @@ export class SqliteConfig {
executeVacuumOnStartup: boolean = false;
}
const dbTypeSchema = z.enum(['sqlite', 'mariadb', 'mysqldb', 'postgresdb']);
const dbTypeSchema = z.enum(['sqlite', 'postgresdb']);
type DbType = z.infer<typeof dbTypeSchema>;
@Config
@@ -180,15 +142,6 @@ export class DatabaseConfig {
@Nested
postgresdb: PostgresConfig;
@Nested
mysqldb: MysqlConfig;
@Nested
sqlite: SqliteConfig;
sanitize() {
if (this.type === 'mariadb' || this.type === 'mysqldb') {
throw new MySqlMariaDbNotSupportedError();
}
}
}

View File

@@ -68,14 +68,6 @@ describe('GlobalConfig', () => {
maxQueryExecutionTime: 0,
options: 'error',
},
mysqldb: {
database: 'n8n',
host: 'localhost',
password: '',
port: 3306,
user: 'root',
poolSize: 10,
},
postgresdb: {
database: 'n8n',
host: 'localhost',
@@ -457,7 +449,6 @@ describe('GlobalConfig', () => {
...defaultConfig,
database: {
logging: defaultConfig.database.logging,
mysqldb: defaultConfig.database.mysqldb,
postgresdb: {
...defaultConfig.database.postgresdb,
host: 'some-host',

View File

@@ -3,7 +3,6 @@ import type { GlobalConfig, InstanceSettingsConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import path from 'path';
import { mysqlMigrations } from '../../migrations/mysqldb';
import { postgresMigrations } from '../../migrations/postgresdb';
import { sqliteMigrations } from '../../migrations/sqlite';
import { DbConnectionOptions } from '../db-connection-options';
@@ -136,57 +135,6 @@ describe('DbConnectionOptions', () => {
});
});
describe('for MySQL / MariaDB', () => {
beforeEach(() => {
dbConfig.mysqldb = {
database: 'test_db',
host: 'localhost',
port: 3306,
user: 'root',
password: 'password',
poolSize: 10,
};
});
it('should return MySQL connection options when type is mysqldb', () => {
dbConfig.type = 'mysqldb';
const result = dbConnectionOptions.getOptions();
expect(result).toEqual({
type: 'mysql',
...commonOptions,
database: 'test_db',
host: 'localhost',
port: 3306,
username: 'root',
password: 'password',
migrations: mysqlMigrations,
timezone: 'Z',
poolSize: 10,
});
});
it('should return MariaDB connection options when type is mariadb', () => {
dbConfig.type = 'mariadb';
const result = dbConnectionOptions.getOptions();
expect(result).toEqual({
type: 'mariadb',
...commonOptions,
database: 'test_db',
host: 'localhost',
port: 3306,
username: 'root',
password: 'password',
migrations: mysqlMigrations,
timezone: 'Z',
poolSize: 10,
});
});
});
describe('logging', () => {
beforeEach(() => {
dbConfig.type = 'sqlite';

View File

@@ -3,7 +3,7 @@ import type { Logger } from '@n8n/backend-common';
import type { DatabaseConfig } from '@n8n/config';
import { DataSource, type DataSourceOptions } from '@n8n/typeorm';
import { mock, mockDeep } from 'jest-mock-extended';
import type { BinaryDataConfig, ErrorReporter } from 'n8n-core';
import type { ErrorReporter } from 'n8n-core';
import { DbConnectionTimeoutError } from 'n8n-workflow';
import * as migrationHelper from '../../migrations/migration-helpers';
@@ -24,10 +24,6 @@ describe('DbConnection', () => {
const errorReporter = mock<ErrorReporter>();
const databaseConfig = mock<DatabaseConfig>();
const logger = mock<Logger>();
const binaryDataConfig = mock<BinaryDataConfig>({
availableModes: ['filesystem'],
dbMaxFileSize: 512,
});
const dataSource = mockDeep<DataSource>({ options: { migrations } });
const connectionOptions = mockDeep<DbConnectionOptions>();
const postgresOptions: DataSourceOptions = {
@@ -46,13 +42,7 @@ describe('DbConnection', () => {
connectionOptions.getOptions.mockReturnValue(postgresOptions);
(DataSource as jest.Mock) = jest.fn().mockImplementation(() => dataSource);
dbConnection = new DbConnection(
errorReporter,
connectionOptions,
databaseConfig,
logger,
binaryDataConfig,
);
dbConnection = new DbConnection(errorReporter, connectionOptions, databaseConfig, logger);
});
describe('init', () => {
@@ -206,7 +196,6 @@ describe('DbConnection', () => {
pingIntervalSeconds: 1,
}),
logger,
binaryDataConfig,
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any

View File

@@ -2,7 +2,6 @@ import { ModuleRegistry } from '@n8n/backend-common';
import { DatabaseConfig, InstanceSettingsConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import type { DataSourceOptions, LoggerOptions } from '@n8n/typeorm';
import type { MysqlConnectionOptions } from '@n8n/typeorm/driver/mysql/MysqlConnectionOptions';
import type { PostgresConnectionOptions } from '@n8n/typeorm/driver/postgres/PostgresConnectionOptions';
import type { SqlitePooledConnectionOptions } from '@n8n/typeorm/driver/sqlite-pooled/SqlitePooledConnectionOptions';
import { UserError } from 'n8n-workflow';
@@ -10,7 +9,6 @@ import type { TlsOptions } from 'node:tls';
import path from 'path';
import { entities } from '../entities';
import { mysqlMigrations } from '../migrations/mysqldb';
import { postgresMigrations } from '../migrations/postgresdb';
import { sqliteMigrations } from '../migrations/sqlite';
import { subscribers } from '../subscribers';
@@ -23,14 +21,13 @@ export class DbConnectionOptions {
private readonly moduleRegistry: ModuleRegistry,
) {}
getOverrides(dbType: 'postgresdb' | 'mysqldb') {
const dbConfig = this.config[dbType];
getPostgresOverrides() {
return {
database: dbConfig.database,
host: dbConfig.host,
port: dbConfig.port,
username: dbConfig.user,
password: dbConfig.password,
database: this.config.postgresdb.database,
host: this.config.postgresdb.host,
port: this.config.postgresdb.port,
username: this.config.postgresdb.user,
password: this.config.postgresdb.password,
};
}
@@ -41,9 +38,6 @@ export class DbConnectionOptions {
return this.getSqliteConnectionOptions();
case 'postgresdb':
return this.getPostgresConnectionOptions();
case 'mariadb':
case 'mysqldb':
return this.getMysqlConnectionOptions(dbType);
default:
throw new UserError('Database type currently not supported', { extra: { dbType } });
}
@@ -111,7 +105,7 @@ export class DbConnectionOptions {
return {
type: 'postgres',
...this.getCommonOptions(),
...this.getOverrides('postgresdb'),
...this.getPostgresOverrides(),
schema: postgresConfig.schema,
poolSize: postgresConfig.poolSize,
migrations: postgresMigrations,
@@ -123,16 +117,4 @@ export class DbConnectionOptions {
},
};
}
private getMysqlConnectionOptions(dbType: 'mariadb' | 'mysqldb'): MysqlConnectionOptions {
const { mysqldb: mysqlConfig } = this.config;
return {
type: dbType === 'mysqldb' ? 'mysql' : 'mariadb',
...this.getCommonOptions(),
...this.getOverrides('mysqldb'),
poolSize: mysqlConfig.poolSize,
migrations: mysqlMigrations,
timezone: 'Z', // set UTC as default
};
}
}

View File

@@ -4,7 +4,7 @@ import { Time } from '@n8n/constants';
import { Memoized } from '@n8n/decorators';
import { Container, Service } from '@n8n/di';
import { DataSource } from '@n8n/typeorm';
import { BinaryDataConfig, ErrorReporter } from 'n8n-core';
import { ErrorReporter } from 'n8n-core';
import { DbConnectionTimeoutError, ensureError, OperationalError } from 'n8n-workflow';
import { setTimeout as setTimeoutP } from 'timers/promises';
@@ -34,7 +34,6 @@ export class DbConnection {
private readonly connectionOptions: DbConnectionOptions,
private readonly databaseConfig: DatabaseConfig,
private readonly logger: Logger,
private readonly binaryDataConfig: BinaryDataConfig,
) {
this.dataSource = new DataSource(this.options);
Container.set(DataSource, this.dataSource);
@@ -67,21 +66,6 @@ export class DbConnection {
throw error;
}
if (
(options.type === 'mysql' || options.type === 'mariadb') &&
this.binaryDataConfig.availableModes.includes('database')
) {
const maxAllowedPacket = this.binaryDataConfig.dbMaxFileSize * 1024 * 1024;
try {
await this.dataSource.query(`SET GLOBAL max_allowed_packet = ${maxAllowedPacket}`);
} catch {
this.logger.warn(
`Failed to set \`max_allowed_packet\` to ${maxAllowedPacket} bytes on your MySQL server. ` +
`Please set \`max_allowed_packet\` to at least ${this.binaryDataConfig.dbMaxFileSize} MiB in your MySQL server configuration.`,
);
}
}
connectionState.connected = true;
if (!inTest) this.scheduleNextPing();
}

View File

@@ -18,8 +18,6 @@ export const { type: dbType } = Container.get(GlobalConfig).database;
const timestampSyntax = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
postgresdb: 'CURRENT_TIMESTAMP(3)',
mysqldb: 'CURRENT_TIMESTAMP(3)',
mariadb: 'CURRENT_TIMESTAMP(3)',
}[dbType];
export const jsonColumnType = dbType === 'sqlite' ? 'simple-json' : 'json';
@@ -27,8 +25,6 @@ export const datetimeColumnType = dbType === 'postgresdb' ? 'timestamptz' : 'dat
const binaryColumnTypeMap = {
sqlite: 'blob',
postgresdb: 'bytea',
mysqldb: 'longblob',
mariadb: 'longblob',
} as const;
const binaryColumnType = binaryColumnTypeMap[dbType];

View File

@@ -29,7 +29,6 @@ export * from './subscribers';
export { Column as DslColumn } from './migrations/dsl/column';
export { CreateTable } from './migrations/dsl/table';
export { sqliteMigrations } from './migrations/sqlite';
export { mysqlMigrations } from './migrations/mysqldb';
export { postgresMigrations } from './migrations/postgresdb';
export { wrapMigration } from './migrations/migration-helpers';

View File

@@ -4,7 +4,7 @@ import type { MigrationContext, ReversibleMigration } from '../migration-types';
export class UniqueWorkflowNames1620821879465 implements ReversibleMigration {
protected indexSuffix = '943d8f922be094eb507cb9a7f9';
async up({ isMysql, escape, runQuery }: MigrationContext) {
async up({ escape, runQuery }: MigrationContext) {
const tableName = escape.tableName('workflow_entity');
const workflowNames: Array<Pick<WorkflowEntity, 'name'>> = await runQuery(
`SELECT name FROM ${tableName}`,
@@ -30,18 +30,11 @@ export class UniqueWorkflowNames1620821879465 implements ReversibleMigration {
}
const indexName = escape.indexName(this.indexSuffix);
await runQuery(
isMysql
? `ALTER TABLE ${tableName} ADD UNIQUE INDEX ${indexName} (${escape.columnName('name')})`
: `CREATE UNIQUE INDEX ${indexName} ON ${tableName} ("name")`,
);
await runQuery(`CREATE UNIQUE INDEX ${indexName} ON ${tableName} ("name")`);
}
async down({ isMysql, escape, runQuery }: MigrationContext) {
const tableName = escape.tableName('workflow_entity');
async down({ escape, runQuery }: MigrationContext) {
const indexName = escape.indexName(this.indexSuffix);
await runQuery(
isMysql ? `ALTER TABLE ${tableName} DROP INDEX ${indexName}` : `DROP INDEX ${indexName}`,
);
await runQuery(`DROP INDEX ${indexName}`);
}
}

View File

@@ -3,7 +3,7 @@ import { LDAP_FEATURE_NAME, LDAP_DEFAULT_CONFIGURATION } from '@n8n/constants';
import type { MigrationContext, ReversibleMigration } from '../migration-types';
export class CreateLdapEntities1674509946020 implements ReversibleMigration {
async up({ escape, dbType, isMysql, runQuery }: MigrationContext) {
async up({ escape, dbType, runQuery }: MigrationContext) {
const userTable = escape.tableName('user');
await runQuery(`ALTER TABLE ${userTable} ADD COLUMN disabled BOOLEAN NOT NULL DEFAULT false;`);
@@ -23,7 +23,7 @@ export class CreateLdapEntities1674509946020 implements ReversibleMigration {
${escape.columnName('createdAt')} timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
${escape.columnName('updatedAt')} timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY(${escape.columnName('providerId')}, ${escape.columnName('providerType')})
)${isMysql ? "ENGINE='InnoDB'" : ''}`,
)`,
);
const idColumn =
@@ -53,8 +53,7 @@ export class CreateLdapEntities1674509946020 implements ReversibleMigration {
${escape.columnName('updated')} INTEGER NOT NULL,
${escape.columnName('disabled')} INTEGER NOT NULL,
${escape.columnName('error')} TEXT
${isMysql ? ',PRIMARY KEY (`id`)' : ''}
)${isMysql ? "ENGINE='InnoDB'" : ''}`,
)`,
);
}

View File

@@ -42,7 +42,6 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
private async migrateUp(
table: Table,
{
dbType,
escape,
runQuery,
schemaBuilder: { addNotNull, addColumns, dropColumns, dropForeignKey, column },
@@ -58,8 +57,7 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
const roleColumnName = table === 'user' ? 'globalRoleId' : 'roleId';
const roleColumn = escape.columnName(roleColumnName);
const scope = roleScopes[table];
const isMySQL = ['mariadb', 'mysqldb'].includes(dbType);
const roleField = isMySQL ? `CONCAT('${scope}:', R.name)` : `'${scope}:' || R.name`;
const roleField = `'${scope}:' || R.name`;
const subQuery = `
SELECT ${roleField} as role, T.${idColumn} as id${
table !== 'user' ? `, T.${uidColumn} as uid` : ''
@@ -70,11 +68,7 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
const where = `WHERE ${tableName}.${idColumn} = mapping.id${
table !== 'user' ? ` AND ${tableName}.${uidColumn} = mapping.uid` : ''
}`;
const swQuery = isMySQL
? `UPDATE ${tableName}, (${subQuery}) as mapping
SET ${tableName}.role = mapping.role
${where}`
: `UPDATE ${tableName}
const swQuery = `UPDATE ${tableName}
SET role = mapping.role
FROM (${subQuery}) as mapping
${where}`;
@@ -94,7 +88,6 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
private async migrateDown(
table: Table,
{
dbType,
escape,
runQuery,
schemaBuilder: { addNotNull, addColumns, dropColumns, addForeignKey, column },
@@ -110,8 +103,7 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
const uidColumn = escape.columnName(uidColumns[table]);
const roleColumn = escape.columnName(roleColumnName);
const scope = roleScopes[table];
const isMySQL = ['mariadb', 'mysqldb'].includes(dbType);
const roleField = isMySQL ? `CONCAT('${scope}:', R.name)` : `'${scope}:' || R.name`;
const roleField = `'${scope}:' || R.name`;
const subQuery = `
SELECT R.id as role_id, T.${idColumn} as id${table !== 'user' ? `, T.${uidColumn} as uid` : ''}
FROM ${tableName} T
@@ -120,11 +112,7 @@ export class DropRoleMapping1705429061930 implements ReversibleMigration {
const where = `WHERE ${tableName}.${idColumn} = mapping.id${
table !== 'user' ? ` AND ${tableName}.${uidColumn} = mapping.uid` : ''
}`;
const query = isMySQL
? `UPDATE ${tableName}, (${subQuery}) as mapping
SET ${tableName}.${roleColumn} = mapping.role_id
${where}`
: `UPDATE ${tableName}
const query = `UPDATE ${tableName}
SET ${roleColumn} = mapping.role_id
FROM (${subQuery}) as mapping
${where}`;

View File

@@ -1,6 +1,5 @@
import type { ProjectRole } from '@n8n/permissions';
import { UserError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import type { User } from '../../entities';
import { generateNanoId } from '../../utils/generators';
@@ -76,7 +75,6 @@ export class CreateProject1714133768519 implements ReversibleMigration {
relationTableName: RelationTable,
{
escape,
isMysql,
runQuery,
schemaBuilder: { addForeignKey, addColumns, addNotNull, createIndex, column },
}: MigrationContext,
@@ -97,11 +95,7 @@ export class CreateProject1714133768519 implements ReversibleMigration {
ON T.${c.userId} = S.${c.userId}
WHERE P.id IS NOT NULL
`;
const swQuery = isMysql
? `UPDATE ${relationTable}, (${subQuery}) as mapping
SET ${relationTable}.${c.projectId} = mapping.${c.projectId}
WHERE ${relationTable}.${c.userId} = mapping.${c.userId}`
: `UPDATE ${relationTable}
const swQuery = `UPDATE ${relationTable}
SET ${c.projectId} = mapping.${c.projectId}
FROM (${subQuery}) as mapping
WHERE ${relationTable}.${c.userId} = mapping.${c.userId}`;
@@ -237,7 +231,7 @@ export class CreateProject1714133768519 implements ReversibleMigration {
await this.alterSharedWorkflow(context);
}
async down({ isMysql, logger, escape, runQuery, schemaBuilder: sb }: MigrationContext) {
async down({ logger, escape, runQuery, schemaBuilder: sb }: MigrationContext) {
const { t, c } = escapeNames(escape);
// 0. check if all projects are personal projects
@@ -264,11 +258,7 @@ export class CreateProject1714133768519 implements ReversibleMigration {
tableName: 'workflow_entity',
columnName: 'id',
onDelete: 'CASCADE',
// In MySQL foreignKey names must be unique across all tables and
// TypeORM creates predictable names based on the columnName.
// So the current shared_workflow table's foreignKey for workflowId would
// clash with this one if we don't create a random name.
name: isMysql ? nanoid() : undefined,
name: undefined,
})
.withForeignKey('userId', {
tableName: table.user,
@@ -302,11 +292,7 @@ export class CreateProject1714133768519 implements ReversibleMigration {
tableName: 'credentials_entity',
columnName: 'id',
onDelete: 'CASCADE',
// In MySQL foreignKey names must be unique across all tables and
// TypeORM creates predictable names based on the columnName.
// So the current shared_credentials table's foreignKey for credentialsId would
// clash with this one if we don't create a random name.
name: isMysql ? nanoid() : undefined,
name: undefined,
})
.withForeignKey('userId', {
tableName: table.user,

View File

@@ -20,9 +20,6 @@ export class AddConstraintToExecutionMetadata1720101653148 implements Reversible
.withColumns(
column('id').int.notNull.primary.autoGenerate,
column('executionId').int.notNull,
// NOTE: This is a varchar(255) instead of text, because a unique index
// on text is not supported on mysql, also why should we support
// arbitrary length keys?
column('key').varchar(255).notNull,
column('value').text.notNull,
)
@@ -30,25 +27,11 @@ export class AddConstraintToExecutionMetadata1720101653148 implements Reversible
tableName: 'execution_entity',
columnName: 'id',
onDelete: 'CASCADE',
// In MySQL foreignKey names must be unique across all tables and
// TypeORM creates predictable names based on the columnName.
// So the temp table's foreignKey clashes with the current table's.
name: context.isMysql ? nanoid() : undefined,
name: undefined,
})
.withIndexOn(['executionId', 'key'], true);
if (context.isMysql) {
await context.runQuery(`
INSERT INTO ${executionMetadataTableTemp} (${id}, ${executionId}, ${key}, ${value})
SELECT MAX(${id}) as ${id}, ${executionId}, ${key}, MAX(${value})
FROM ${executionMetadataTable}
GROUP BY ${executionId}, ${key}
ON DUPLICATE KEY UPDATE
id = IF(VALUES(${id}) > ${executionMetadataTableTemp}.${id}, VALUES(${id}), ${executionMetadataTableTemp}.${id}),
value = IF(VALUES(${id}) > ${executionMetadataTableTemp}.${id}, VALUES(${value}), ${executionMetadataTableTemp}.${value});
`);
} else {
await context.runQuery(`
await context.runQuery(`
INSERT INTO ${executionMetadataTableTemp} (${id}, ${executionId}, ${key}, ${value})
SELECT MAX(${id}) as ${id}, ${executionId}, ${key}, MAX(${value})
FROM ${executionMetadataTable}
@@ -58,7 +41,6 @@ export class AddConstraintToExecutionMetadata1720101653148 implements Reversible
value = EXCLUDED.value
WHERE EXCLUDED.id > ${executionMetadataTableTemp}.id;
`);
}
await dropTable(executionMetadataTableRaw);
await context.runQuery(
@@ -94,10 +76,7 @@ export class AddConstraintToExecutionMetadata1720101653148 implements Reversible
tableName: 'execution_entity',
columnName: 'id',
onDelete: 'CASCADE',
// In MySQL foreignKey names must be unique across all tables and
// TypeORM creates predictable names based on the columnName.
// So the temp table's foreignKey clashes with the current table's.
name: context.isMysql ? nanoid() : undefined,
name: undefined,
});
await context.runQuery(`

View File

@@ -13,10 +13,6 @@ import type { MigrationContext, ReversibleMigration } from '../migration-types';
* - `waitTill`
* - `status, workflowId`
*
* Remove unused indices in MySQL:
*
* - `status`
*
* Remove unused indices in all DBs:
*
* - `waitTill, id`
@@ -31,7 +27,7 @@ import type { MigrationContext, ReversibleMigration } from '../migration-types';
* - `deletedAt` for query at `ExecutionRepository.hardDeleteSoftDeletedExecutions`
*/
export class RefactorExecutionIndices1723796243146 implements ReversibleMigration {
async up({ schemaBuilder, isPostgres, isSqlite, isMysql, runQuery, escape }: MigrationContext) {
async up({ schemaBuilder, isPostgres, isSqlite, runQuery, escape }: MigrationContext) {
if (isSqlite || isPostgres) {
const executionEntity = escape.tableName('execution_entity');
@@ -59,10 +55,6 @@ export class RefactorExecutionIndices1723796243146 implements ReversibleMigratio
ON ${executionEntity} (${stoppedAt}, ${status}, ${deletedAt})
WHERE ${stoppedAt} IS NOT NULL AND ${deletedAt} IS NULL;
`);
} else if (isMysql) {
await schemaBuilder.createIndex('execution_entity', ['workflowId', 'startedAt']);
await schemaBuilder.createIndex('execution_entity', ['waitTill', 'status', 'deletedAt']);
await schemaBuilder.createIndex('execution_entity', ['stoppedAt', 'status', 'deletedAt']);
}
if (isSqlite) {
@@ -77,13 +69,6 @@ export class RefactorExecutionIndices1723796243146 implements ReversibleMigratio
});
}
if (isMysql) {
await schemaBuilder.dropIndex('execution_entity', ['status'], {
customIndexName: 'IDX_8b6f3f9ae234f137d707b98f3bf43584',
skipIfMissing: true,
});
}
// all DBs
await schemaBuilder.dropIndex(
@@ -98,10 +83,9 @@ export class RefactorExecutionIndices1723796243146 implements ReversibleMigratio
skipIfMissing: true,
});
await schemaBuilder.dropIndex('execution_entity', ['workflowId', 'id'], {
customIndexName:
isPostgres || isMysql
? 'idx_execution_entity_workflow_id_id'
: 'IDX_81fc04c8a17de15835713505e4',
customIndexName: isPostgres
? 'idx_execution_entity_workflow_id_id'
: 'IDX_81fc04c8a17de15835713505e4',
skipIfMissing: true,
});
}

View File

@@ -61,7 +61,6 @@ export class AddApiKeysTable1724951148974 implements ReversibleMigration {
runQuery,
schemaBuilder: { dropTable, addColumns, createIndex, column },
escape,
isMysql,
}: MigrationContext) {
const userTable = escape.tableName('user');
const userApiKeysTable = escape.tableName('user_api_keys');
@@ -74,16 +73,7 @@ export class AddApiKeysTable1724951148974 implements ReversibleMigration {
await createIndex('user', ['apiKey'], true);
const queryToGetUsersApiKeys = isMysql
? `
SELECT ${userIdColumn},
${apiKeyColumn},
${createdAtColumn}
FROM ${userApiKeysTable} u
WHERE ${createdAtColumn} = (SELECT Min(${createdAtColumn})
FROM ${userApiKeysTable}
WHERE ${userIdColumn} = u.${userIdColumn});`
: `
const queryToGetUsersApiKeys = `
SELECT DISTINCT ON
(${userIdColumn}) ${userIdColumn},
${apiKeyColumn}, ${createdAtColumn}

View File

@@ -2,33 +2,23 @@ import type { MigrationContext, ReversibleMigration } from '../migration-types';
const processedDataTableName = 'processed_data';
export class UpdateProcessedDataValueColumnToText1729607673464 implements ReversibleMigration {
async up({ schemaBuilder: { addNotNull }, isMysql, runQuery, tablePrefix }: MigrationContext) {
async up({ schemaBuilder: { addNotNull }, runQuery, tablePrefix }: MigrationContext) {
const prefixedTableName = `${tablePrefix}${processedDataTableName}`;
await runQuery(`ALTER TABLE ${prefixedTableName} ADD COLUMN value_temp TEXT;`);
await runQuery(`UPDATE ${prefixedTableName} SET value_temp = value;`);
await runQuery(`ALTER TABLE ${prefixedTableName} DROP COLUMN value;`);
if (isMysql) {
await runQuery(`ALTER TABLE ${prefixedTableName} CHANGE value_temp value TEXT NOT NULL;`);
} else {
await runQuery(`ALTER TABLE ${prefixedTableName} RENAME COLUMN value_temp TO value`);
await addNotNull(processedDataTableName, 'value');
}
await runQuery(`ALTER TABLE ${prefixedTableName} RENAME COLUMN value_temp TO value`);
await addNotNull(processedDataTableName, 'value');
}
async down({ schemaBuilder: { addNotNull }, isMysql, runQuery, tablePrefix }: MigrationContext) {
async down({ schemaBuilder: { addNotNull }, runQuery, tablePrefix }: MigrationContext) {
const prefixedTableName = `${tablePrefix}${processedDataTableName}`;
await runQuery(`ALTER TABLE ${prefixedTableName} ADD COLUMN value_temp VARCHAR(255);`);
await runQuery(`UPDATE ${prefixedTableName} SET value_temp = value;`);
await runQuery(`ALTER TABLE ${prefixedTableName} DROP COLUMN value;`);
if (isMysql) {
await runQuery(
`ALTER TABLE ${prefixedTableName} CHANGE value_temp value VARCHAR(255) NOT NULL;`,
);
} else {
await runQuery(`ALTER TABLE ${prefixedTableName} RENAME COLUMN value_temp TO value`);
await addNotNull(processedDataTableName, 'value');
}
await runQuery(`ALTER TABLE ${prefixedTableName} RENAME COLUMN value_temp TO value`);
await addNotNull(processedDataTableName, 'value');
}
}

View File

@@ -9,7 +9,6 @@ export class ClearEvaluation1745322634000 implements IrreversibleMigration {
tablePrefix,
isSqlite,
isPostgres,
isMysql,
}: MigrationContext) {
// Drop test_metric, test_definition
await dropTable(testCaseExecutionTableName);
@@ -19,8 +18,6 @@ export class ClearEvaluation1745322634000 implements IrreversibleMigration {
await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition;`);
} else if (isPostgres) {
await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition CASCADE;`);
} else if (isMysql) {
await queryRunner.query(`DROP TABLE IF EXISTS ${tablePrefix}test_definition CASCADE;`);
}
await createTable(testRunTableName)

View File

@@ -24,7 +24,6 @@ export class AddRolesTables1750252139167 implements ReversibleMigration {
schemaBuilder: { createTable, column, createIndex },
queryRunner,
tablePrefix,
dbType,
}: MigrationContext) {
await createTable('role').withColumns(
column('slug')
@@ -42,30 +41,15 @@ export class AddRolesTables1750252139167 implements ReversibleMigration {
.notNull.comment('Indicates if the role is managed by the system and cannot be edited'),
);
// MYSQL
if (dbType === 'postgresdb' || dbType === 'sqlite') {
// POSTGRES
await queryRunner.query(
`CREATE TABLE ${tablePrefix}role_scope (
"roleSlug" VARCHAR(128) NOT NULL,
"scopeSlug" VARCHAR(128) NOT NULL,
CONSTRAINT "PK_${tablePrefix}role_scope" PRIMARY KEY ("roleSlug", "scopeSlug"),
CONSTRAINT "FK_${tablePrefix}role" FOREIGN KEY ("roleSlug") REFERENCES ${tablePrefix}role ("slug") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "FK_${tablePrefix}scope" FOREIGN KEY ("scopeSlug") REFERENCES "${tablePrefix}scope" ("slug") ON DELETE CASCADE ON UPDATE CASCADE
);`,
);
} else {
// MYSQL
await queryRunner.query(
`CREATE TABLE ${tablePrefix}role_scope (
\`roleSlug\` VARCHAR(128) NOT NULL,
\`scopeSlug\` VARCHAR(128) NOT NULL,
FOREIGN KEY (\`scopeSlug\`) REFERENCES ${tablePrefix}scope (\`slug\`) ON DELETE CASCADE ON UPDATE CASCADE,
FOREIGN KEY (\`roleSlug\`) REFERENCES ${tablePrefix}role (\`slug\`) ON DELETE CASCADE ON UPDATE CASCADE,
PRIMARY KEY (\`roleSlug\`, \`scopeSlug\`)
) ENGINE=InnoDB;`,
);
}
await queryRunner.query(
`CREATE TABLE ${tablePrefix}role_scope (
"roleSlug" VARCHAR(128) NOT NULL,
"scopeSlug" VARCHAR(128) NOT NULL,
CONSTRAINT "PK_${tablePrefix}role_scope" PRIMARY KEY ("roleSlug", "scopeSlug"),
CONSTRAINT "FK_${tablePrefix}role" FOREIGN KEY ("roleSlug") REFERENCES ${tablePrefix}role ("slug") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "FK_${tablePrefix}scope" FOREIGN KEY ("scopeSlug") REFERENCES "${tablePrefix}scope" ("slug") ON DELETE CASCADE ON UPDATE CASCADE
);`,
);
await createIndex('role_scope', ['scopeSlug']);
/*

View File

@@ -10,7 +10,6 @@ const VALUE_COLUMN_NAME = 'value';
export class ChangeValueTypesForInsights1759399811000 implements IrreversibleMigration {
async up({
isSqlite,
isMysql,
isPostgres,
escape,
copyTable,
@@ -76,13 +75,6 @@ export class ChangeValueTypesForInsights1759399811000 implements IrreversibleMig
await queryRunner.query(
`ALTER TABLE ${tempInsightsByPeriodTable} RENAME TO ${insightsByPeriodTable};`,
);
} else if (isMysql) {
await queryRunner.query(
`ALTER TABLE ${insightsRawTable} MODIFY COLUMN ${valueColumnName} BIGINT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${insightsByPeriodTable} MODIFY COLUMN ${valueColumnName} BIGINT NOT NULL;`,
);
} else if (isPostgres) {
await queryRunner.query(
`ALTER TABLE ${insightsRawTable} ALTER COLUMN ${valueColumnName} TYPE BIGINT;`,

View File

@@ -2,7 +2,7 @@ import type { Role } from '../../entities';
import type { MigrationContext, ReversibleMigration } from '../migration-types';
export class UniqueRoleNames1760020838000 implements ReversibleMigration {
async up({ isMysql, escape, runQuery }: MigrationContext) {
async up({ escape, runQuery }: MigrationContext) {
const tableName = escape.tableName('role');
const displayNameColumn = escape.columnName('displayName');
const slugColumn = escape.columnName('slug');
@@ -43,22 +43,11 @@ export class UniqueRoleNames1760020838000 implements ReversibleMigration {
}
const indexName = escape.indexName('UniqueRoleDisplayName');
// MySQL cannot create an index on a column with a type of TEXT or BLOB without a length limit
// The (100) specifies the maximum length of the index key
// meaning that only the first 100 characters of the displayName column will be used for indexing
// But since in our DTOs we limit the displayName to 100 characters, we can safely use this prefix length
await runQuery(
isMysql
? `CREATE UNIQUE INDEX ${indexName} ON ${tableName} (${displayNameColumn}(100))`
: `CREATE UNIQUE INDEX ${indexName} ON ${tableName} (${displayNameColumn})`,
);
await runQuery(`CREATE UNIQUE INDEX ${indexName} ON ${tableName} (${displayNameColumn})`);
}
async down({ isMysql, escape, runQuery }: MigrationContext) {
const tableName = escape.tableName('role');
async down({ escape, runQuery }: MigrationContext) {
const indexName = escape.indexName('UniqueRoleDisplayName');
await runQuery(
isMysql ? `ALTER TABLE ${tableName} DROP INDEX ${indexName}` : `DROP INDEX ${indexName}`,
);
await runQuery(`DROP INDEX ${indexName}`);
}
}

View File

@@ -8,7 +8,6 @@ export class ChangeOAuthStateColumnToUnboundedVarchar1763572724000
{
async up({
isSqlite,
isMysql,
isPostgres,
escape,
copyTable,
@@ -48,16 +47,6 @@ export class ChangeOAuthStateColumnToUnboundedVarchar1763572724000
await dropTable(TABLE_NAME);
await queryRunner.query(`ALTER TABLE ${tempTableName} RENAME TO ${tableName};`);
} else if (isMysql) {
await queryRunner.query(
`ALTER TABLE ${tableName} MODIFY COLUMN ${escape.columnName('state')} TEXT;`,
);
await queryRunner.query(
`ALTER TABLE ${tableName} MODIFY COLUMN ${escape.columnName('codeChallenge')} TEXT NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tableName} MODIFY COLUMN ${escape.columnName('redirectUri')} TEXT NOT NULL;`,
);
} else if (isPostgres) {
await queryRunner.query(
`ALTER TABLE ${tableName} ALTER COLUMN ${escape.columnName('state')} TYPE VARCHAR,` +

View File

@@ -159,7 +159,6 @@ export class Column {
length,
primaryKeyConstraintName,
} = this;
const isMysql = 'mysql' in driver;
const isPostgres = 'postgres' in driver;
const isSqlite = 'sqlite' in driver;
@@ -173,8 +172,6 @@ export class Column {
if (options.type === 'int' && isSqlite) {
options.type = 'integer';
} else if (type === 'boolean' && isMysql) {
options.type = 'tinyint(1)';
} else if (type === 'timestamptz') {
options.type = isPostgres ? 'timestamptz' : 'datetime';
} else if (type === 'timestamp') {
@@ -182,15 +179,11 @@ export class Column {
} else if (type === 'json' && isSqlite) {
options.type = 'text';
} else if (type === 'uuid') {
// mysql does not support uuid type
if (isMysql) options.type = 'varchar(36)';
// we haven't been defining length on "uuid" varchar on sqlite
if (isSqlite) options.type = 'varchar';
} else if (type === 'double') {
if (isPostgres) {
options.type = 'double precision';
} else if (isMysql) {
options.type = 'double';
} else if (isSqlite) {
options.type = 'real';
}
@@ -199,8 +192,6 @@ export class Column {
} else if (type === 'binary') {
if (isPostgres) {
options.type = 'bytea';
} else if (isMysql) {
options.type = 'longblob';
} else if (isSqlite) {
options.type = 'blob';
}
@@ -220,7 +211,7 @@ export class Column {
if (isGenerated2) {
options.isGenerated = true;
options.generationStrategy = type === 'uuid' ? 'uuid' : isMysql ? 'increment' : 'identity';
options.generationStrategy = type === 'uuid' ? 'uuid' : 'identity';
}
if (isPrimary || isGenerated || isGenerated2) {

View File

@@ -123,7 +123,6 @@ export class CreateTable extends TableOperation {
...(uniqueConstraints.size ? { uniques: [...uniqueConstraints] } : {}),
...(foreignKeys.size ? { foreignKeys: [...foreignKeys] } : {}),
...(checks.size ? { checks: [...checks] } : {}),
...('mysql' in driver ? { engine: 'InnoDB' } : {}),
}),
true,
);

View File

@@ -90,17 +90,15 @@ function parseJson<T>(data: string | T): T {
const globalConfig = Container.get(GlobalConfig);
const dbType = globalConfig.database.type;
const isMysql = ['mariadb', 'mysqldb'].includes(dbType);
const isSqlite = dbType === 'sqlite';
const isPostgres = dbType === 'postgresdb';
const dbName = globalConfig.database[dbType === 'mariadb' ? 'mysqldb' : dbType].database;
const dbName = globalConfig.database[dbType].database;
const tablePrefix = globalConfig.database.tablePrefix;
const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({
logger: Container.get(Logger),
tablePrefix,
dbType,
isMysql,
isSqlite,
isPostgres,
dbName,

View File

@@ -3,14 +3,13 @@ import type { QueryRunner, ObjectLiteral } from '@n8n/typeorm';
import type { createSchemaBuilder } from './dsl';
export type DatabaseType = 'mariadb' | 'postgresdb' | 'mysqldb' | 'sqlite';
export type DatabaseType = 'postgresdb' | 'sqlite';
export interface MigrationContext {
logger: Logger;
queryRunner: QueryRunner;
tablePrefix: string;
dbType: DatabaseType;
isMysql: boolean;
isSqlite: boolean;
isPostgres: boolean;
dbName: string;

View File

@@ -1,45 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Initial MySQL schema: creates the three core tables
 * `credentials_entity`, `execution_entity` and `workflow_entity`
 * (all InnoDB, auto-increment integer primary keys).
 */
export class InitialMigration1588157391238 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
// Credentials table, with a secondary index on `type`.
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `' +
tablePrefix +
'credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_' +
tablePrefix +
'07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB',
);
// Executions table, with a secondary index on `workflowId`.
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `' +
tablePrefix +
'execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_' +
tablePrefix +
'c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB',
);
// Workflows table. NOTE(review): there is no space after EXISTS below; MySQL
// accepts it because the backtick delimits the identifier, but it looks like
// a typo preserved for history.
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS`' +
tablePrefix +
'workflow_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `active` tinyint NOT NULL, `nodes` json NOT NULL, `connections` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, `settings` json NULL, `staticData` json NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB',
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
// Drop in reverse creation order, removing each secondary index explicitly
// before its table.
await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflow_entity`');
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'c4d999a5e90784e8caccf5589d` ON `' +
tablePrefix +
'execution_entity`',
);
await queryRunner.query('DROP TABLE `' + tablePrefix + 'execution_entity`');
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'07fde106c0b471d8cc80a64fc8` ON `' +
tablePrefix +
'credentials_entity`',
);
await queryRunner.query('DROP TABLE `' + tablePrefix + 'credentials_entity`');
}
}

View File

@@ -1,13 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates the `webhook_entity` table: one row per registered webhook, holding
 * the owning workflow id, webhook path, HTTP method and node name. The
 * composite primary key (webhookPath, method) allows at most one registration
 * per path-and-verb pair.
 */
export class WebhookModel1592447867632 implements ReversibleMigration {
async up(context: MigrationContext) {
const { queryRunner: runner, tablePrefix: prefix } = context;
const createTableSql = `CREATE TABLE IF NOT EXISTS ${prefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`;
await runner.query(createTableSql);
}
async down(context: MigrationContext) {
const { queryRunner: runner, tablePrefix: prefix } = context;
await runner.query(`DROP TABLE ${prefix}webhook_entity`);
}
}

View File

@@ -1,23 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Adds an index on `execution_entity.stoppedAt`; down() drops it again.
 * The index name embeds the table prefix, matching TypeORM's naming scheme.
 */
export class CreateIndexStoppedAt1594902918301 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
const indexName = `IDX_${tablePrefix}cefb067df2402f6aed0638a6c1`;
const tableName = `${tablePrefix}execution_entity`;
await queryRunner.query(`CREATE INDEX \`${indexName}\` ON \`${tableName}\` (\`stoppedAt\`)`);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
const indexName = `IDX_${tablePrefix}cefb067df2402f6aed0638a6c1`;
const tableName = `${tablePrefix}execution_entity`;
await queryRunner.query(`DROP INDEX \`${indexName}\` ON \`${tableName}\``);
}
}

View File

@@ -1,15 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Relaxes `execution_entity.stoppedAt` to a nullable datetime;
 * down() restores the NOT NULL constraint.
 */
export class MakeStoppedAtNullable1607431743767 implements ReversibleMigration {
async up(context: MigrationContext) {
await this.redefineStoppedAt(context, 'datetime');
}
async down(context: MigrationContext) {
await this.redefineStoppedAt(context, 'datetime NOT NULL');
}
// Rewrites the column definition of `stoppedAt` to the given type clause.
private async redefineStoppedAt({ queryRunner, tablePrefix }: MigrationContext, clause: string) {
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}execution_entity\` MODIFY \`stoppedAt\` ${clause}`,
);
}
}

View File

@@ -1,35 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Adds nullable `webhookId` and `pathLength` columns to `webhook_entity`,
 * plus a composite index over (webhookId, method, pathLength).
 */
export class AddWebhookId1611149998770 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
await queryRunner.query(
'ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL',
);
await queryRunner.query(
'ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL',
);
// Index spans the two new columns plus the existing `method` column.
await queryRunner.query(
'CREATE INDEX `IDX_' +
tablePrefix +
'742496f199721a057051acf4c2` ON `' +
tablePrefix +
'webhook_entity` (`webhookId`, `method`, `pathLength`)',
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
// Reverse order: drop the index first, then the columns it spans.
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'742496f199721a057051acf4c2` ON `' +
tablePrefix +
'webhook_entity`',
);
await queryRunner.query(
'ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `pathLength`',
);
await queryRunner.query(
'ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `webhookId`',
);
}
}

View File

@@ -1,15 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Widens `execution_entity.data` from TEXT to MEDIUMTEXT so larger execution
 * payloads fit; down() narrows the column back to TEXT.
 */
export class ChangeDataSize1615306975123 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
const executionTable = `${tablePrefix}execution_entity`;
await queryRunner.query(
`ALTER TABLE \`${executionTable}\` MODIFY COLUMN \`data\` MEDIUMTEXT NOT NULL`,
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
const executionTable = `${tablePrefix}execution_entity`;
await queryRunner.query(
`ALTER TABLE \`${executionTable}\` MODIFY COLUMN \`data\` TEXT NOT NULL`,
);
}
}

View File

@@ -1,150 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Introduces workflow tagging: creates `tag_entity` and the `workflows_tags`
 * join table (with FKs to `workflow_entity` and `tag_entity`), and switches
 * the `createdAt`/`updatedAt` columns of the existing core tables to
 * millisecond-precision datetimes with database-side defaults.
 */
export class CreateTagEntity1617268711084 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
// create tags table + relationship with workflow entity
await queryRunner.query(
'CREATE TABLE `' +
tablePrefix +
'tag_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(24) NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, UNIQUE INDEX `IDX_' +
tablePrefix +
'8f949d7a3a984759044054e89b` (`name`), PRIMARY KEY (`id`)) ENGINE=InnoDB',
);
// Join table: composite PK (workflowId, tagId) plus one index per FK column.
await queryRunner.query(
'CREATE TABLE `' +
tablePrefix +
'workflows_tags` (`workflowId` int NOT NULL, `tagId` int NOT NULL, INDEX `IDX_' +
tablePrefix +
'54b2f0343d6a2078fa13744386` (`workflowId`), INDEX `IDX_' +
tablePrefix +
'77505b341625b0b4768082e217` (`tagId`), PRIMARY KEY (`workflowId`, `tagId`)) ENGINE=InnoDB',
);
// FKs cascade deletes: removing a workflow or tag clears its join rows.
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflows_tags` ADD CONSTRAINT `FK_' +
tablePrefix +
'54b2f0343d6a2078fa137443869` FOREIGN KEY (`workflowId`) REFERENCES `' +
tablePrefix +
'workflow_entity`(`id`) ON DELETE CASCADE ON UPDATE NO ACTION',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflows_tags` ADD CONSTRAINT `FK_' +
tablePrefix +
'77505b341625b0b4768082e2171` FOREIGN KEY (`tagId`) REFERENCES `' +
tablePrefix +
'tag_entity`(`id`) ON DELETE CASCADE ON UPDATE NO ACTION',
);
// set default dates for `createdAt` and `updatedAt`, with `updatedAt`
// auto-refreshing on every row update
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'credentials_entity` CHANGE `createdAt` `createdAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3)',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'credentials_entity` CHANGE `updatedAt` `updatedAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'tag_entity` CHANGE `createdAt` `createdAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3)',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'tag_entity` CHANGE `updatedAt` `updatedAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflow_entity` CHANGE `createdAt` `createdAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3)',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflow_entity` CHANGE `updatedAt` `updatedAt` datetime(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3) ON UPDATE CURRENT_TIMESTAMP(3)',
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
// Revert `createdAt` and `updatedAt` to plain datetimes without defaults.
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflow_entity` CHANGE `updatedAt` `updatedAt` datetime NOT NULL',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflow_entity` CHANGE `createdAt` `createdAt` datetime NOT NULL',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'tag_entity` CHANGE `updatedAt` `updatedAt` datetime NOT NULL',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'tag_entity` CHANGE `createdAt` `createdAt` datetime NOT NULL',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'credentials_entity` CHANGE `updatedAt` `updatedAt` datetime NOT NULL',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'credentials_entity` CHANGE `createdAt` `createdAt` datetime NOT NULL',
);
// Tear down tagging: FKs first, then indexes, then the tables themselves.
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflows_tags` DROP FOREIGN KEY `FK_' +
tablePrefix +
'77505b341625b0b4768082e2171`',
);
await queryRunner.query(
'ALTER TABLE `' +
tablePrefix +
'workflows_tags` DROP FOREIGN KEY `FK_' +
tablePrefix +
'54b2f0343d6a2078fa137443869`',
);
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'77505b341625b0b4768082e217` ON `' +
tablePrefix +
'workflows_tags`',
);
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'54b2f0343d6a2078fa13744386` ON `' +
tablePrefix +
'workflows_tags`',
);
await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflows_tags`');
await queryRunner.query(
'DROP INDEX `IDX_' +
tablePrefix +
'8f949d7a3a984759044054e89b` ON `' +
tablePrefix +
'tag_entity`',
);
await queryRunner.query('DROP TABLE `' + tablePrefix + 'tag_entity`');
}
}

View File

@@ -1,19 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Widens `credentials_entity.type` from varchar(32) to varchar(128);
 * down() restores the original varchar(32) length.
 */
export class ChangeCredentialDataSize1620729500000 implements ReversibleMigration {
async up(context: MigrationContext) {
await this.setTypeColumnLength(context, 128);
}
async down(context: MigrationContext) {
await this.setTypeColumnLength(context, 32);
}
// Redefines the `type` column with the given varchar length.
private async setTypeColumnLength(
{ queryRunner, tablePrefix }: MigrationContext,
length: number,
) {
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}credentials_entity\` MODIFY COLUMN \`type\` varchar(${length}) NOT NULL`,
);
}
}

View File

@@ -1,3 +0,0 @@
import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames';
// Re-exports the shared implementation from `../common` under this database's
// own migration timestamp; no behavior is added here.
export class UniqueWorkflowNames1620826335440 extends UniqueWorkflowNames1620821879465 {}

View File

@@ -1,38 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
/**
 * Converts the database default and all n8n tables to the utf8mb4 character
 * set, picking the best collation the server supports. Irreversible: there
 * is intentionally no down() (see the trailing note).
 */
export class CertifyCorrectCollation1623936588000 implements IrreversibleMigration {
async up({ queryRunner, tablePrefix, dbType, dbName }: MigrationContext) {
if (dbType === 'mariadb') {
// This collation fix applies to MySQL only, so skip it on MariaDB.
return;
}
// Prefer `utf8mb4_0900_ai_ci` where the server offers it; otherwise fall
// back to the widely available `utf8mb4_general_ci`.
const checkCollationExistence = (await queryRunner.query(
"show collation where collation like 'utf8mb4_0900_ai_ci';",
)) as unknown[];
let collation = 'utf8mb4_general_ci';
if (checkCollationExistence.length > 0) {
collation = 'utf8mb4_0900_ai_ci';
}
// Change the database-level default first, then convert each table.
await queryRunner.query(
`ALTER DATABASE \`${dbName}\` CHARACTER SET utf8mb4 COLLATE ${collation};`,
);
for (const tableName of [
'credentials_entity',
'execution_entity',
'tag_entity',
'webhook_entity',
'workflow_entity',
'workflows_tags',
]) {
await queryRunner.query(
`ALTER TABLE ${tablePrefix}${tableName} CONVERT TO CHARACTER SET utf8mb4 COLLATE ${collation};`,
);
}
}
// There is no down migration in this case as we already expect default collation to be utf8mb4
// The up migration exists simply to enforce that n8n will work with older mysql versions
}

View File

@@ -1,29 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Adds the nullable `waitTill` DATETIME column to `execution_entity` together
 * with an index on it; down() removes both again.
 */
export class AddWaitColumnId1626183952959 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
const tableName = `${tablePrefix}execution_entity`;
const indexName = `IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2`;
// Add the column first, then the index that covers it.
await queryRunner.query(`ALTER TABLE \`${tableName}\` ADD \`waitTill\` DATETIME NULL`);
await queryRunner.query(`CREATE INDEX \`${indexName}\` ON \`${tableName}\` (\`waitTill\`)`);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
const tableName = `${tablePrefix}execution_entity`;
const indexName = `IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2`;
// Reverse order: drop the index before the column it references.
await queryRunner.query(`DROP INDEX \`${indexName}\` ON \`${tableName}\``);
await queryRunner.query(`ALTER TABLE \`${tableName}\` DROP COLUMN \`waitTill\``);
}
}

View File

@@ -1,3 +0,0 @@
import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials';
// Re-exports the shared implementation from `../common` under this database's
// own migration timestamp; no behavior is added here.
export class UpdateWorkflowCredentials1630451444017 extends UpdateWorkflowCredentials1630330987096 {}

View File

@@ -1,51 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Replaces the single-column `execution_entity` indices on `workflowId` and
 * `waitTill` with five composite indices: (workflowId, waitTill, id),
 * (workflowId, finished, id), (finished, id), (waitTill, id) and
 * (workflowId, id).
 */
export class AddExecutionEntityIndexes1644424784709 implements ReversibleMigration {
async up({ queryRunner, tablePrefix }: MigrationContext) {
// Drop the old single-column indices on `workflowId` and `waitTill`.
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}c4d999a5e90784e8caccf5589d\` ON \`${tablePrefix}execution_entity\``,
);
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2\` ON \`${tablePrefix}execution_entity\``,
);
// Create the composite replacements.
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}06da892aaf92a48e7d3e400003\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`, \`waitTill\`, \`id\`)`,
);
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}78d62b89dc1433192b86dce18a\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`, \`finished\`, \`id\`)`,
);
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}1688846335d274033e15c846a4\` ON \`${tablePrefix}execution_entity\` (\`finished\`, \`id\`)`,
);
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9\` ON \`${tablePrefix}execution_entity\` (\`waitTill\`, \`id\`)`,
);
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}81fc04c8a17de15835713505e4\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`, \`id\`)`,
);
}
async down({ queryRunner, tablePrefix }: MigrationContext) {
// Remove the composite indices (reverse creation order) …
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}81fc04c8a17de15835713505e4\` ON \`${tablePrefix}execution_entity\``,
);
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9\` ON \`${tablePrefix}execution_entity\``,
);
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}1688846335d274033e15c846a4\` ON \`${tablePrefix}execution_entity\``,
);
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}78d62b89dc1433192b86dce18a\` ON \`${tablePrefix}execution_entity\``,
);
await queryRunner.query(
`DROP INDEX \`IDX_${tablePrefix}06da892aaf92a48e7d3e400003\` ON \`${tablePrefix}execution_entity\``,
);
// … then restore the original single-column indices.
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2\` ON \`${tablePrefix}execution_entity\` (\`waitTill\`)`,
);
await queryRunner.query(
`CREATE INDEX \`IDX_${tablePrefix}c4d999a5e90784e8caccf5589d\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`)`,
);
}
}

View File

@@ -1,174 +0,0 @@
import { v4 as uuid } from 'uuid';
import type { InsertResult, MigrationContext, ReversibleMigration } from '../migration-types';
export class CreateUserManagement1646992772331 implements ReversibleMigration {
async up({ queryRunner, tablePrefix, loadSurveyFromDisk }: MigrationContext) {
await queryRunner.query(
`CREATE TABLE ${tablePrefix}role (
\`id\` int NOT NULL AUTO_INCREMENT,
\`name\` varchar(32) NOT NULL,
\`scope\` varchar(255) NOT NULL,
\`createdAt\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`updatedAt\` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (\`id\`),
UNIQUE KEY \`UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8\` (\`scope\`,\`name\`)
) ENGINE=InnoDB;`,
);
await queryRunner.query(
`CREATE TABLE ${tablePrefix}user (
\`id\` VARCHAR(36) NOT NULL,
\`email\` VARCHAR(255) NULL DEFAULT NULL,
\`firstName\` VARCHAR(32) NULL DEFAULT NULL,
\`lastName\` VARCHAR(32) NULL DEFAULT NULL,
\`password\` VARCHAR(255) NULL DEFAULT NULL,
\`resetPasswordToken\` VARCHAR(255) NULL DEFAULT NULL,
\`resetPasswordTokenExpiration\` INT NULL DEFAULT NULL,
\`personalizationAnswers\` TEXT NULL DEFAULT NULL,
\`createdAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`updatedAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`globalRoleId\` INT NOT NULL,
PRIMARY KEY (\`id\`),
UNIQUE INDEX \`IDX_${tablePrefix}e12875dfb3b1d92d7d7c5377e2\` (\`email\` ASC),
INDEX \`FK_${tablePrefix}f0609be844f9200ff4365b1bb3d\` (\`globalRoleId\` ASC)
) ENGINE=InnoDB;`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}user\` ADD CONSTRAINT \`FK_${tablePrefix}f0609be844f9200ff4365b1bb3d\` FOREIGN KEY (\`globalRoleId\`) REFERENCES \`${tablePrefix}role\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`CREATE TABLE ${tablePrefix}shared_workflow (
\`createdAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`updatedAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`roleId\` INT NOT NULL,
\`userId\` VARCHAR(36) NOT NULL,
\`workflowId\` INT NOT NULL,
INDEX \`FK_${tablePrefix}3540da03964527aa24ae014b780x\` (\`roleId\` ASC),
INDEX \`FK_${tablePrefix}82b2fd9ec4e3e24209af8160282x\` (\`userId\` ASC),
INDEX \`FK_${tablePrefix}b83f8d2530884b66a9c848c8b88x\` (\`workflowId\` ASC),
PRIMARY KEY (\`userId\`, \`workflowId\`)
) ENGINE=InnoDB;`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_workflow\` ADD CONSTRAINT \`FK_${tablePrefix}3540da03964527aa24ae014b780\` FOREIGN KEY (\`roleId\`) REFERENCES \`${tablePrefix}role\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_workflow\` ADD CONSTRAINT \`FK_${tablePrefix}82b2fd9ec4e3e24209af8160282\` FOREIGN KEY (\`userId\`) REFERENCES \`${tablePrefix}user\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_workflow\` ADD CONSTRAINT \`FK_${tablePrefix}b83f8d2530884b66a9c848c8b88\` FOREIGN KEY (\`workflowId\`) REFERENCES \`${tablePrefix}workflow_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`CREATE TABLE ${tablePrefix}shared_credentials (
\`createdAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`updatedAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
\`roleId\` INT NOT NULL,
\`userId\` VARCHAR(36) NOT NULL,
\`credentialsId\` INT NOT NULL,
INDEX \`FK_${tablePrefix}c68e056637562000b68f480815a\` (\`roleId\` ASC),
INDEX \`FK_${tablePrefix}484f0327e778648dd04f1d70493\` (\`userId\` ASC),
INDEX \`FK_${tablePrefix}68661def1d4bcf2451ac8dbd949\` (\`credentialsId\` ASC),
PRIMARY KEY (\`userId\`, \`credentialsId\`)
) ENGINE=InnoDB;`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_credentials\` ADD CONSTRAINT \`FK_${tablePrefix}484f0327e778648dd04f1d70493\` FOREIGN KEY (\`userId\`) REFERENCES \`${tablePrefix}user\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_credentials\` ADD CONSTRAINT \`FK_${tablePrefix}68661def1d4bcf2451ac8dbd949\` FOREIGN KEY (\`credentialsId\`) REFERENCES \`${tablePrefix}credentials_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE \`${tablePrefix}shared_credentials\` ADD CONSTRAINT \`FK_${tablePrefix}c68e056637562000b68f480815a\` FOREIGN KEY (\`roleId\`) REFERENCES \`${tablePrefix}role\`(\`id\`) ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`CREATE TABLE ${tablePrefix}settings (
\`key\` VARCHAR(255) NOT NULL,
\`value\` TEXT NOT NULL,
\`loadOnStartup\` TINYINT(1) NOT NULL DEFAULT 0,
PRIMARY KEY (\`key\`)
) ENGINE=InnoDB;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity DROP INDEX IDX_${tablePrefix}943d8f922be094eb507cb9a7f9`,
);
// Insert initial roles
await queryRunner.query(
`INSERT INTO ${tablePrefix}role (name, scope) VALUES ("owner", "global");`,
);
const instanceOwnerRole = (await queryRunner.query(
'SELECT LAST_INSERT_ID() as insertId',
)) as InsertResult;
await queryRunner.query(
`INSERT INTO ${tablePrefix}role (name, scope) VALUES ("member", "global");`,
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}role (name, scope) VALUES ("owner", "workflow");`,
);
const workflowOwnerRole = (await queryRunner.query(
'SELECT LAST_INSERT_ID() as insertId',
)) as InsertResult;
await queryRunner.query(
`INSERT INTO ${tablePrefix}role (name, scope) VALUES ("owner", "credential");`,
);
const credentialOwnerRole = (await queryRunner.query(
'SELECT LAST_INSERT_ID() as insertId',
)) as InsertResult;
const survey = loadSurveyFromDisk();
const ownerUserId = uuid();
await queryRunner.query(
`INSERT INTO ${tablePrefix}user (id, globalRoleId, personalizationAnswers) values (?, ?, ?)`,
[ownerUserId, instanceOwnerRole[0].insertId, survey],
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}shared_workflow (createdAt, updatedAt, roleId, userId, workflowId) select
NOW(), NOW(), '${workflowOwnerRole[0].insertId}', '${ownerUserId}', id FROM ${tablePrefix}workflow_entity`,
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}shared_credentials (createdAt, updatedAt, roleId, userId, credentialsId) SELECT NOW(), NOW(), '${credentialOwnerRole[0].insertId}', '${ownerUserId}', id FROM ${tablePrefix}credentials_entity`,
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}settings (\`key\`, value, loadOnStartup) VALUES ("userManagement.isInstanceOwnerSetUp", "false", 1), ("userManagement.skipInstanceOwnerSetup", "false", 1)`,
);
await queryRunner.query(
`INSERT INTO ${tablePrefix}settings (\`key\`, value, loadOnStartup) VALUES ("ui.banners.dismissed", JSON_ARRAY('V1'), 1)`,
);
}
	/**
	 * Reverts the user-management migration: restores the unique index on
	 * workflow names (dropped by `up`) and removes all tables `up` created.
	 */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Re-create the unique workflow-name index removed in up().
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity ADD UNIQUE INDEX \`IDX_${tablePrefix}943d8f922be094eb507cb9a7f9\` (\`name\`)`,
		);
		// Drop child tables before their FK targets (user, role) to avoid constraint errors.
		// NOTE(review): identifiers are double-quoted here, which on MySQL requires the
		// ANSI_QUOTES sql_mode — presumably set on the connection; verify.
		await queryRunner.query(`DROP TABLE "${tablePrefix}shared_credentials"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}shared_workflow"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}user"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}role"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}settings"`);
	}
}

View File

@@ -1,10 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
/** One-way migration that persists every stored user email in lower case. */
export class LowerCaseUserEmail1648740597343 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// Normalize in a single UPDATE; irreversible because original casing is lost.
		const lowerCaseEmails = `
      UPDATE ${tablePrefix}user
      SET email = LOWER(email);
    `;
		await queryRunner.query(lowerCaseEmails);
	}
}

View File

@@ -1,41 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates the community-nodes package registry: `installed_packages` and
 * `installed_nodes` (one row per node shipped by a package), linked by an
 * ON DELETE CASCADE foreign key.
 */
export class CommunityNodes1652254514003 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// npm package names are limited to 214 characters, hence char(214).
		await queryRunner.query(
			`CREATE TABLE \`${tablePrefix}installed_packages\` (` +
				'`packageName` char(214) NOT NULL,' +
				'`installedVersion` char(50) NOT NULL,' +
				'`authorName` char(70) NULL,' +
				'`authorEmail` char(70) NULL,' +
				'`createdAt` datetime NULL DEFAULT CURRENT_TIMESTAMP,' +
				'`updatedAt` datetime NULL DEFAULT CURRENT_TIMESTAMP,' +
				'PRIMARY KEY (`packageName`)' +
				') ENGINE=InnoDB;',
		);
		await queryRunner.query(
			`CREATE TABLE \`${tablePrefix}installed_nodes\` (` +
				'`name` char(200) NOT NULL,' +
				'`type` char(200) NOT NULL,' +
				"`latestVersion` int NOT NULL DEFAULT '1'," +
				'`package` char(214) NOT NULL,' +
				'PRIMARY KEY (`name`),' +
				`INDEX \`FK_${tablePrefix}73f857fc5dce682cef8a99c11dbddbc969618951\` (\`package\` ASC)` +
				") ENGINE='InnoDB';",
		);
		// Deleting a package cascades to its nodes; renaming a package updates them.
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}installed_nodes\` ADD CONSTRAINT \`FK_${tablePrefix}73f857fc5dce682cef8a99c11dbddbc969618951\` FOREIGN KEY (\`package\`) REFERENCES \`${tablePrefix}installed_packages\`(\`packageName\`) ON DELETE CASCADE ON UPDATE CASCADE`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Fix: the previous revert also re-created the unique workflow-name index
		// (IDX_...943d8f...) on workflow_entity, copy-pasted from another migration's
		// down() — this migration's up() never touched that index, so reverting here
		// must only drop the two tables it created (child table first for the FK).
		await queryRunner.query(`DROP TABLE "${tablePrefix}installed_nodes"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}installed_packages"`);
	}
}

View File

@@ -1,18 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Adds a per-user JSON `settings` column and converts the existing
 * `personalizationAnswers` column to the native JSON type.
 */
export class AddUserSettings1652367743993 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const userTable = `\`${tablePrefix}user\``;
		await queryRunner.query(
			`ALTER TABLE ${userTable} ADD COLUMN \`settings\` json NULL DEFAULT NULL`,
		);
		await queryRunner.query(
			`ALTER TABLE ${userTable} CHANGE COLUMN \`personalizationAnswers\` \`personalizationAnswers\` json NULL DEFAULT NULL`,
		);
	}

	/** Drops `settings`; the personalizationAnswers type change is intentionally kept. */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`ALTER TABLE \`${tablePrefix}user\` DROP COLUMN \`settings\``);
	}
}

View File

@@ -1,23 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Adds a nullable `apiKey` column to the user table with a unique index on it. */
export class AddAPIKeyColumn1652905585850 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}user\` ADD COLUMN \`apiKey\` VARCHAR(255)`,
		);
		// Unique so an API key maps to exactly one user.
		await queryRunner.query(
			`CREATE UNIQUE INDEX \`UQ_${tablePrefix}ie0zomxves9w3p774drfrkxtj5\` ON \`${tablePrefix}user\` (\`apiKey\`)`,
		);
	}

	/** Removes the index first, then the column. */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`DROP INDEX \`UQ_${tablePrefix}ie0zomxves9w3p774drfrkxtj5\` ON \`${tablePrefix}user\``,
		);
		await queryRunner.query(`ALTER TABLE \`${tablePrefix}user\` DROP COLUMN \`apiKey\``);
	}
}

View File

@@ -1,13 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Adds a nullable JSON `pinData` column to workflow_entity. */
export class IntroducePinData1654090101303 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `${tablePrefix}workflow_entity`;
		await queryRunner.query(`ALTER TABLE \`${table}\` ADD \`pinData\` json`);
	}

	/** Drops the `pinData` column again. */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `${tablePrefix}workflow_entity`;
		await queryRunner.query(`ALTER TABLE \`${table}\` DROP COLUMN \`pinData\``);
	}
}

View File

@@ -1,3 +0,0 @@
import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds';
export class AddNodeIds1658932910559 extends AddNodeIds1658930531669 {}

View File

@@ -1,3 +0,0 @@
import { AddJsonKeyPinData1659888469333 } from '../common/1659888469333-AddJsonKeyPinData';
export class AddJsonKeyPinData1659895550980 extends AddJsonKeyPinData1659888469333 {}

View File

@@ -1,16 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Seeds the "user" role for the credential scope. */
export class CreateCredentialsUserRole1660062385367 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// INSERT IGNORE makes re-runs a no-op if the role already exists.
		await queryRunner.query(`
			INSERT IGNORE INTO ${tablePrefix}role (name, scope)
			VALUES ("user", "credential");
		`);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Remove exactly the role inserted by up().
		await queryRunner.query(`
			DELETE FROM ${tablePrefix}role WHERE name='user' AND scope='credential';
		`);
	}
}

View File

@@ -1,16 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Seeds the "editor" role for the workflow scope. */
export class CreateWorkflowsEditorRole1663755770894 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// INSERT IGNORE makes re-runs a no-op if the role already exists.
		await queryRunner.query(`
			INSERT IGNORE INTO ${tablePrefix}role (name, scope)
			VALUES ("editor", "workflow")
		`);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Fix: delete the 'editor' role that up() inserted. The previous code
		// deleted name='user' (copy-pasted from CreateCredentialsUserRole), so
		// reverting never removed the role this migration created.
		await queryRunner.query(`
			DELETE FROM ${tablePrefix}role WHERE name='editor' AND scope='workflow';
		`);
	}
}

View File

@@ -1,26 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Adds per-workflow statistics: a `workflow_statistics` table keyed by
 * (workflowId, name) holding a counter plus latest-event timestamp, and a
 * `dataLoaded` flag on workflow_entity.
 */
export class WorkflowStatistics1664196174002 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// One row per (workflow, statistic name); rows vanish with their workflow.
		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}workflow_statistics (
				count INTEGER DEFAULT 0,
				latestEvent DATETIME,
				name VARCHAR(128) NOT NULL,
				workflowId INTEGER,
				PRIMARY KEY(workflowId, name),
				FOREIGN KEY(workflowId) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE
			)`,
		);
		// Add dataLoaded column to workflow table
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN dataLoaded BOOLEAN DEFAULT false`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// NOTE(review): double-quoted identifier requires ANSI_QUOTES on MySQL — verify.
		await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_statistics"`);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN dataLoaded`);
	}
}

View File

@@ -1,28 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates `credential_usage`, mapping (workflowId, nodeId) pairs to the
 * credential they use, with cascading FKs to workflow_entity and
 * credentials_entity. (Dropped again shortly after by
 * RemoveCredentialUsageTable1665754637026.)
 */
export class CreateCredentialUsageTable1665484192213 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`CREATE TABLE \`${tablePrefix}credential_usage\` (` +
				'`workflowId` int NOT NULL,' +
				'`nodeId` char(200) NOT NULL,' +
				"`credentialId` int NOT NULL DEFAULT '1'," +
				'`createdAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
				'`updatedAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
				'PRIMARY KEY (`workflowId`, `nodeId`, `credentialId`)' +
				") ENGINE='InnoDB';",
		);
		// Rows follow their workflow/credential on delete and rename.
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}credential_usage\` ADD CONSTRAINT \`FK_${tablePrefix}518e1ece107b859ca6ce9ed2487f7e23\` FOREIGN KEY (\`workflowId\`) REFERENCES \`${tablePrefix}workflow_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE CASCADE`,
		);
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}credential_usage\` ADD CONSTRAINT \`FK_${tablePrefix}7ce200a20ade7ae89fa7901da896993f\` FOREIGN KEY (\`credentialId\`) REFERENCES \`${tablePrefix}credentials_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE CASCADE`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP TABLE "${tablePrefix}credential_usage"`);
	}
}

View File

@@ -1,28 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Drops the short-lived `credential_usage` table introduced by
 * CreateCredentialUsageTable1665484192213; `down` re-creates it identically.
 */
export class RemoveCredentialUsageTable1665754637026 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP TABLE \`${tablePrefix}credential_usage\``);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Mirror of the original CREATE from the earlier migration.
		await queryRunner.query(
			`CREATE TABLE \`${tablePrefix}credential_usage\` (` +
				'`workflowId` int NOT NULL,' +
				'`nodeId` char(200) NOT NULL,' +
				"`credentialId` int NOT NULL DEFAULT '1'," +
				'`createdAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
				'`updatedAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,' +
				'PRIMARY KEY (`workflowId`, `nodeId`, `credentialId`)' +
				") ENGINE='InnoDB';",
		);
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}credential_usage\` ADD CONSTRAINT \`FK_${tablePrefix}518e1ece107b859ca6ce9ed2487f7e23\` FOREIGN KEY (\`workflowId\`) REFERENCES \`${tablePrefix}workflow_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE CASCADE`,
		);
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}credential_usage\` ADD CONSTRAINT \`FK_${tablePrefix}7ce200a20ade7ae89fa7901da896993f\` FOREIGN KEY (\`credentialId\`) REFERENCES \`${tablePrefix}credentials_entity\`(\`id\`) ON DELETE CASCADE ON UPDATE CASCADE`,
		);
	}
}

View File

@@ -1,3 +0,0 @@
import { AddWorkflowVersionIdColumn1669739707124 } from '../common/1669739707124-AddWorkflowVersionIdColumn';
export class AddWorkflowVersionIdColumn1669739707125 extends AddWorkflowVersionIdColumn1669739707124 {}

View File

@@ -1,14 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Adds a `triggerCount` column (NOT NULL, default 0) to workflow_entity. */
export class AddTriggerCountColumn1669823906994 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `${tablePrefix}workflow_entity`;
		await queryRunner.query(
			`ALTER TABLE ${table} ADD COLUMN triggerCount integer NOT NULL DEFAULT 0`,
		);
		// Table will be populated by n8n startup - see ActiveWorkflowManager.ts
	}

	/** Drops the `triggerCount` column. */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `${tablePrefix}workflow_entity`;
		await queryRunner.query(`ALTER TABLE ${table} DROP COLUMN triggerCount`);
	}
}

View File

@@ -1,18 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates `event_destinations`: one row per configured event-bus destination,
 * with a UUID primary key and the destination config serialized as text.
 */
export class MessageEventBusDestinations1671535397530 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}event_destinations (` +
				'`id` varchar(36) PRIMARY KEY NOT NULL,' +
				'`destination` text NOT NULL,' +
				'`createdAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, ' +
				'`updatedAt` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP' +
				") ENGINE='InnoDB';",
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// NOTE(review): double-quoted identifier requires ANSI_QUOTES on MySQL — verify.
		await queryRunner.query(`DROP TABLE "${tablePrefix}event_destinations"`);
	}
}

View File

@@ -1,3 +0,0 @@
import { RemoveWorkflowDataLoadedFlag1671726148419 } from '../common/1671726148419-RemoveWorkflowDataLoadedFlag';
export class RemoveWorkflowDataLoadedFlag1671726148420 extends RemoveWorkflowDataLoadedFlag1671726148419 {}

View File

@@ -1,35 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Deletes executions that reference no-longer-existing workflows, then adds a
 * cascading FK from execution_entity.workflowId to workflow_entity.id so such
 * orphans cannot reappear. `down` removes the FK and widens the column back.
 */
export class DeleteExecutionsWithWorkflows1673268682475 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// Column must be INT to match workflow_entity.id before the FK can be added.
		await queryRunner.query(`ALTER TABLE \`${tablePrefix}execution_entity\` MODIFY workflowId INT`);

		const workflowIds = (await queryRunner.query(`
			SELECT id FROM \`${tablePrefix}workflow_entity\`
		`)) as Array<{ id: number }>;

		// Remove executions whose workflowId matches none of the surviving workflows.
		// With zero workflows the NOT IN clause is omitted so all non-NULL rows go.
		await queryRunner.query(
			`DELETE FROM \`${tablePrefix}execution_entity\`
			 WHERE workflowId IS NOT NULL
			 ${workflowIds.length ? `AND workflowId NOT IN (${workflowIds.map(({ id }) => id).join()})` : ''}`,
		);

		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}execution_entity\`
			 ADD CONSTRAINT \`FK_${tablePrefix}execution_entity_workflowId\`
			 FOREIGN KEY (\`workflowId\`) REFERENCES \`${tablePrefix}workflow_entity\`(\`id\`)
			 ON DELETE CASCADE`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}execution_entity\`
			 DROP FOREIGN KEY \`FK_${tablePrefix}execution_entity_workflowId\``,
		);
		// Restore the pre-migration varchar type; deleted executions are not restored.
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}execution_entity\` MODIFY workflowId varchar(255);`,
		);
	}
}

View File

@@ -1,15 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/** Adds a nullable `status` VARCHAR(255) column to execution_entity. */
export class AddStatusToExecutions1674138566000 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `\`${tablePrefix}execution_entity\``;
		await queryRunner.query(`ALTER TABLE ${table} ADD COLUMN \`status\` VARCHAR(255)`);
	}

	/** Drops the `status` column. */
	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `\`${tablePrefix}execution_entity\``;
		await queryRunner.query(`ALTER TABLE ${table} DROP COLUMN \`status\``);
	}
}

View File

@@ -1,18 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
/**
 * Backfills the `status` column for executions created before it existed.
 * Each UPDATE only touches rows still NULL, so statement order matters:
 * waiting → failed → success, with 'crashed' as the catch-all for the rest.
 */
export class MigrateExecutionStatus1676996103000 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// Executions parked on a wait node.
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='waiting' WHERE status IS NULL AND \`waitTill\` IS NOT NULL;`,
		);
		// Finished executions: finished=0 → failed, finished=1 → success.
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='failed' WHERE status IS NULL AND finished=0 AND \`stoppedAt\` IS NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='success' WHERE status IS NULL AND finished=1 AND \`stoppedAt\` IS NOT NULL;`,
		);
		// Anything left never recorded a stop — treat as crashed.
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='crashed' WHERE status IS NULL;`,
		);
	}
}

View File

@@ -1,12 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
/**
 * Repairs executions stuck in status 'running' despite having stopped:
 * maps them to 'failed' or 'success' based on the `finished` flag.
 */
export class UpdateRunningExecutionStatus1677236788851 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='failed' WHERE status = 'running' AND finished=0 AND \`stoppedAt\` IS NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE \`${tablePrefix}execution_entity\` SET status='success' WHERE status = 'running' AND finished=1 AND \`stoppedAt\` IS NOT NULL;`,
		);
	}
}

View File

@@ -1,20 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates the `variables` table: unique string keys mapping to typed values
 * (type defaults to 'string'), with an auto-increment integer id.
 */
export class CreateVariables1677501636753 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`
			CREATE TABLE ${tablePrefix}variables (
				id int(11) auto_increment NOT NULL PRIMARY KEY,
				\`key\` VARCHAR(50) NOT NULL,
				\`type\` VARCHAR(50) DEFAULT 'string' NOT NULL,
				value VARCHAR(255) NULL,
				UNIQUE (\`key\`)
			)
			ENGINE=InnoDB;
		`);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP TABLE ${tablePrefix}variables;`);
	}
}

View File

@@ -1,59 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Creates `execution_metadata` (key/value pairs per execution, cascading with
 * the execution) and reworks execution_entity indexes: four pre-status-column
 * indexes are dropped in favor of a single (status, workflowId) index.
 */
export class CreateExecutionMetadataTable1679416281779 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}execution_metadata (
				id int(11) auto_increment NOT NULL PRIMARY KEY,
				executionId int(11) NOT NULL,
				\`key\` TEXT NOT NULL,
				value TEXT NOT NULL,
				CONSTRAINT \`${tablePrefix}execution_metadata_FK\` FOREIGN KEY (\`executionId\`) REFERENCES \`${tablePrefix}execution_entity\` (\`id\`) ON DELETE CASCADE,
				INDEX \`IDX_${tablePrefix}6d44376da6c1058b5e81ed8a154e1fee106046eb\` (\`executionId\` ASC)
			)
			ENGINE=InnoDB`,
		);

		// Remove indices that are no longer needed since the addition of the status column
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}06da892aaf92a48e7d3e400003\` ON \`${tablePrefix}execution_entity\``,
		);
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}78d62b89dc1433192b86dce18a\` ON \`${tablePrefix}execution_entity\``,
		);
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}1688846335d274033e15c846a4\` ON \`${tablePrefix}execution_entity\``,
		);
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}cefb067df2402f6aed0638a6c1\` ON \`${tablePrefix}execution_entity\``,
		);

		// Add index to the new status column
		await queryRunner.query(
			`CREATE INDEX \`IDX_${tablePrefix}8b6f3f9ae234f137d707b98f3bf43584\` ON \`${tablePrefix}execution_entity\` (\`status\`, \`workflowId\`)`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Recreate the four legacy indexes and drop the status index again.
		await queryRunner.query(`DROP TABLE "${tablePrefix}execution_metadata"`);
		await queryRunner.query(
			`CREATE INDEX \`IDX_${tablePrefix}06da892aaf92a48e7d3e400003\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`, \`waitTill\`, \`id\`)`,
		);
		await queryRunner.query(
			`CREATE INDEX \`IDX_${tablePrefix}78d62b89dc1433192b86dce18a\` ON \`${tablePrefix}execution_entity\` (\`workflowId\`, \`finished\`, \`id\`)`,
		);
		await queryRunner.query(
			`CREATE INDEX \`IDX_${tablePrefix}1688846335d274033e15c846a4\` ON \`${tablePrefix}execution_entity\` (\`finished\`, \`id\`)`,
		);
		await queryRunner.query(
			'CREATE INDEX `IDX_' +
				tablePrefix +
				'cefb067df2402f6aed0638a6c1` ON `' +
				tablePrefix +
				'execution_entity` (`stoppedAt`)',
		);
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}8b6f3f9ae234f137d707b98f3bf43584\` ON \`${tablePrefix}execution_entity\``,
		);
	}
}

View File

@@ -1,53 +0,0 @@
import type { UserSettings } from '../../entities/types-db';
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * Sets a `userActivated` flag inside each user's JSON `settings`: true for
 * users who own at least one workflow with a production_success statistic,
 * false for everyone else. `down` strips the flag again.
 */
export class AddUserActivatedProperty1681134145996 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// Owners of workflows that have at least one successful production run,
		// with their settings pre-merged with userActivated=true by JSON_SET.
		const activatedUsers = (await queryRunner.query(
			`SELECT DISTINCT sw.userId AS id,
				JSON_SET(COALESCE(u.settings, '{}'), '$.userActivated', true) AS settings
			FROM ${tablePrefix}workflow_statistics AS ws
				JOIN ${tablePrefix}shared_workflow as sw
					ON ws.workflowId = sw.workflowId
				JOIN ${tablePrefix}role AS r
					ON r.id = sw.roleId
				JOIN ${tablePrefix}user AS u
					ON u.id = sw.userId
			WHERE ws.name = 'production_success'
				AND r.name = 'owner'
				AND r.scope = 'workflow'`,
		)) as UserSettings[];
		const updatedUsers = activatedUsers.map(async (user) => {
			/*
				MariaDB returns settings as a string and MySQL as a JSON
			*/
			const userSettings =
				typeof user.settings === 'string' ? user.settings : JSON.stringify(user.settings);
			// NOTE(review): userSettings and user.id are interpolated into the SQL
			// without escaping — safe only if settings JSON never contains a single
			// quote; verify, or use parameterized queries.
			await queryRunner.query(
				`UPDATE ${tablePrefix}user SET settings = '${userSettings}' WHERE id = '${user.id}' `,
			);
		});
		// The map above already started all UPDATEs; wait for them to finish.
		await Promise.all(updatedUsers);
		if (!activatedUsers.length) {
			// No activated users: flag everyone false.
			await queryRunner.query(
				`UPDATE ${tablePrefix}user SET settings = JSON_SET(COALESCE(settings, '{}'), '$.userActivated', false)`,
			);
		} else {
			// Flag false only for users not updated above.
			const activatedUserIds = activatedUsers.map((user) => `'${user.id}'`).join(',');
			await queryRunner.query(
				`UPDATE ${tablePrefix}user SET settings = JSON_SET(COALESCE(settings, '{}'), '$.userActivated', false) WHERE id NOT IN (${activatedUserIds})`,
			);
		}
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Remove the flag; reset settings to NULL where that leaves an empty object.
		await queryRunner.query(
			`UPDATE ${tablePrefix}user SET settings = JSON_REMOVE(settings, '$.userActivated')`,
		);
		await queryRunner.query(`UPDATE ${tablePrefix}user SET settings = NULL WHERE settings = '{}'`);
	}
}

View File

@@ -1,9 +0,0 @@
import type { IrreversibleMigration, MigrationContext } from '../migration-types';
/** One-way cleanup: deletes the obsolete skipInstanceOwnerSetup setting row. */
export class RemoveSkipOwnerSetup1681134145997 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const deleteObsoleteSetting = `DELETE FROM ${tablePrefix}settings WHERE \`key\` = 'userManagement.skipInstanceOwnerSetup';`;
		await queryRunner.query(deleteObsoleteSetting);
	}
}

View File

@@ -1,275 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
// Collations used for the new varchar id columns; chosen per server flavor/version
// so the new columns join cleanly against other utf8mb4 columns.
const COLLATION_57 = 'utf8mb4_general_ci';
const COLLATION_80 = 'utf8mb4_0900_ai_ci';

/**
 * Converts integer primary keys (workflow, tag, credentials, variables) to
 * varchar(36) and propagates the change to every referencing table
 * (workflows_tags, shared_workflow, workflow_statistics, webhook_entity,
 * execution_entity, shared_credentials).
 *
 * Pattern used per table: rename the old int column to tmp_*, add the new
 * varchar column, copy values via CONVERT(..., CHAR), re-point indexes/FKs/PKs
 * at the new column, then drop the tmp_* column. Statement order is load-bearing
 * throughout — children are rewired before the parent's old PK is dropped.
 */
export class MigrateIntegerKeysToString1690000000001 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix, dbType }: MigrationContext) {
		// Pick the collation: MariaDB and MySQL 5.7 lack utf8mb4_0900_ai_ci.
		let collation: string;
		if (dbType === 'mariadb') {
			collation = COLLATION_57;
		} else {
			const dbVersionQuery = (await queryRunner.query('SELECT @@version')) as
				| Array<{ '@@version': string }>
				| undefined;
			collation = COLLATION_80;
			if (dbVersionQuery?.length === 1) {
				const dbVersion = dbVersionQuery[0]['@@version'];
				if (dbVersion.startsWith('5.7')) {
					collation = COLLATION_57;
				}
			}
		}

		// --- workflow_entity: add varchar id alongside the old int id ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN id varchar(36) NOT NULL;`,
		);
		await queryRunner.query(`UPDATE ${tablePrefix}workflow_entity SET id = CONVERT(tmp_id, CHAR);`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX \`TMP_idx_${tablePrefix}workflow_entity_id\` ON ${tablePrefix}workflow_entity (\`id\`);`,
		);

		// --- tag_entity: same add-alongside pattern ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}tag_entity CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}tag_entity ADD COLUMN id varchar(36) NOT NULL;`,
		);
		await queryRunner.query(`UPDATE ${tablePrefix}tag_entity SET id = CONVERT(tmp_id, CHAR);`);
		await queryRunner.query(
			`CREATE UNIQUE INDEX \`TMP_idx_${tablePrefix}tag_entity_id\` ON ${tablePrefix}tag_entity (\`id\`);`,
		);

		// --- workflows_tags: switch both FK columns and re-point PK/index/FKs ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags CHANGE workflowId tmp_workflowId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`workflowId\` varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}workflows_tags SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags CHANGE tagId tmp_tagId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags ADD COLUMN \`tagId\` varchar(36) NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}workflows_tags SET \`tagId\` = CONVERT(\`tmp_tagId\`, CHAR);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags DROP PRIMARY KEY, ADD PRIMARY KEY (\`workflowId\`, \`tagId\`);`,
		);
		await queryRunner.query(
			`CREATE INDEX \`idx_${tablePrefix}workflows_tags_workflow_id\` ON ${tablePrefix}workflows_tags (\`workflowId\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_${tablePrefix}54b2f0343d6a2078fa137443869\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_${tablePrefix}workflows_tags_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags DROP FOREIGN KEY \`FK_${tablePrefix}77505b341625b0b4768082e2171\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT \`fk_${tablePrefix}workflows_tags_tag_id\` FOREIGN KEY (\`tagId\`) REFERENCES ${tablePrefix}tag_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_workflowId\`;`,
		);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflows_tags DROP COLUMN \`tmp_tagId\`;`);

		// --- shared_workflow: re-point workflowId FK at the varchar id ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow CHANGE workflowId tmp_workflowId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow ADD COLUMN \`workflowId\` varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}shared_workflow SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow DROP PRIMARY KEY, ADD PRIMARY KEY (\`userId\`, \`workflowId\`);`,
		);
		await queryRunner.query(
			`CREATE INDEX \`idx_${tablePrefix}shared_workflow_workflow_id\` ON ${tablePrefix}shared_workflow (\`workflowId\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow DROP FOREIGN KEY \`FK_${tablePrefix}b83f8d2530884b66a9c848c8b88\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow ADD CONSTRAINT \`fk_${tablePrefix}shared_workflow_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_workflow DROP COLUMN \`tmp_workflowId\`;`,
		);

		// --- workflow_statistics ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics CHANGE workflowId tmp_workflowId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics ADD COLUMN \`workflowId\` varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}workflow_statistics SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
		);
		await queryRunner.query(
			`CREATE INDEX \`idx_${tablePrefix}workflow_statistics_workflow_id\` ON ${tablePrefix}workflow_statistics (\`workflowId\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics DROP FOREIGN KEY \`${tablePrefix}workflow_statistics_ibfk_1\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics ADD CONSTRAINT \`fk_${tablePrefix}workflow_statistics_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics DROP PRIMARY KEY, ADD PRIMARY KEY (\`workflowId\`, \`name\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_statistics DROP COLUMN \`tmp_workflowId\`;`,
		);

		// --- webhook_entity ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}webhook_entity CHANGE workflowId tmp_workflowId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}webhook_entity ADD COLUMN \`workflowId\` varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}webhook_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN \`tmp_workflowId\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}webhook_entity ADD CONSTRAINT \`fk_${tablePrefix}webhook_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);

		// --- execution_entity: workflowId stays nullable here ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity CHANGE workflowId tmp_workflowId int NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN \`workflowId\` varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation};`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}execution_entity SET \`workflowId\` = CONVERT(\`tmp_workflowId\`, CHAR);`,
		);
		await queryRunner.query(
			`CREATE INDEX \`idx_${tablePrefix}execution_entity_workflow_id_id\` ON ${tablePrefix}execution_entity (\`workflowId\`,\`id\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity DROP FOREIGN KEY \`FK_${tablePrefix}execution_entity_workflowId\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity ADD CONSTRAINT \`fk_${tablePrefix}execution_entity_workflow_id\` FOREIGN KEY (\`workflowId\`) REFERENCES ${tablePrefix}workflow_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`DROP INDEX \`IDX_${tablePrefix}81fc04c8a17de15835713505e4\` ON ${tablePrefix}execution_entity;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN \`tmp_workflowId\`;`,
		);

		// --- finalize workflow_entity / tag_entity: drop AUTO_INCREMENT (via the
		// MODIFY), promote the varchar id to primary key, drop temp column/index ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
		);
		await queryRunner.query(
			`DROP INDEX \`TMP_idx_${tablePrefix}workflow_entity_id\` ON ${tablePrefix}workflow_entity;`,
		);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN tmp_id;`);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}tag_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}tag_entity DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
		);
		await queryRunner.query(
			`DROP INDEX \`TMP_idx_${tablePrefix}tag_entity_id\` ON ${tablePrefix}tag_entity;`,
		);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}tag_entity DROP COLUMN tmp_id;`);

		// --- credentials_entity + shared_credentials: same pattern as workflows ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}credentials_entity CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}credentials_entity ADD COLUMN id varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}credentials_entity SET id = CONVERT(tmp_id, CHAR);`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX \`TMP_idx_${tablePrefix}credentials_entity_id\` ON ${tablePrefix}credentials_entity (\`id\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials CHANGE credentialsId tmp_credentialsId int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials ADD COLUMN credentialsId varchar(36) CHARACTER SET utf8mb4 COLLATE ${collation} NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}shared_credentials SET credentialsId = CONVERT(tmp_credentialsId, CHAR);`,
		);
		await queryRunner.query(
			`CREATE INDEX \`idx_${tablePrefix}shared_credentials_id\` ON ${tablePrefix}shared_credentials (\`credentialsId\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials DROP FOREIGN KEY \`FK_${tablePrefix}68661def1d4bcf2451ac8dbd949\`;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials ADD CONSTRAINT \`fk_${tablePrefix}shared_credentials_credentials_id\` FOREIGN KEY (\`credentialsId\`) REFERENCES ${tablePrefix}credentials_entity(id) ON DELETE CASCADE ON UPDATE NO ACTION;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials MODIFY COLUMN tmp_credentialsId INT NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials DROP PRIMARY KEY, ADD PRIMARY KEY (\`userId\`,\`credentialsId\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}shared_credentials DROP COLUMN tmp_credentialsId;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}credentials_entity MODIFY COLUMN tmp_id INT NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}credentials_entity DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
		);
		await queryRunner.query(
			`DROP INDEX \`TMP_idx_${tablePrefix}credentials_entity_id\` ON ${tablePrefix}credentials_entity;`,
		);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity DROP COLUMN tmp_id;`);

		// --- variables: standalone table, no referencing children ---
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}variables CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}variables ADD COLUMN \`id\` varchar(36) NOT NULL;`,
		);
		await queryRunner.query(
			`UPDATE ${tablePrefix}variables SET \`id\` = CONVERT(\`tmp_id\`, CHAR);`,
		);
		await queryRunner.query(
			`CREATE UNIQUE INDEX \`TMP_idx_${tablePrefix}variables_id\` ON ${tablePrefix}variables (\`id\`);`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}variables CHANGE \`tmp_id\` \`tmp_id\` int NOT NULL;`,
		);
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}variables DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
		);
		await queryRunner.query(`ALTER TABLE ${tablePrefix}variables DROP COLUMN \`tmp_id\`;`);
		await queryRunner.query(
			`DROP INDEX \`TMP_idx_${tablePrefix}variables_id\` ON ${tablePrefix}variables;`,
		);
	}
}

View File

@@ -1,43 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
export class SeparateExecutionData1690000000030 implements ReversibleMigration {
	/**
	 * Moves the large execution payloads (`workflowData`, `data`) out of
	 * `execution_entity` into a dedicated `execution_data` table whose rows are
	 * removed together with their execution (ON DELETE CASCADE).
	 */
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// `data` is MEDIUMTEXT on purpose: execution payloads regularly exceed TEXT's 64KB limit.
		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}execution_data (
				executionId int(11) NOT NULL primary key,
				workflowData json NOT NULL,
				data MEDIUMTEXT NOT NULL,
				CONSTRAINT \`${tablePrefix}execution_data_FK\` FOREIGN KEY (\`executionId\`) REFERENCES \`${tablePrefix}execution_entity\` (\`id\`) ON DELETE CASCADE
			)
			ENGINE=InnoDB`,
		);

		// Backfill: one execution_data row per existing execution.
		await queryRunner.query(
			`INSERT INTO ${tablePrefix}execution_data (
				executionId,
				workflowData,
				data)
				SELECT id, workflowData, data FROM ${tablePrefix}execution_entity
			`,
		);

		// Drop the now-duplicated columns from the parent table.
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity DROP COLUMN workflowData, DROP COLUMN data`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		// Re-add the columns as nullable, copy the payloads back, drop the side table.
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}execution_entity
				ADD workflowData json NULL,
				ADD data MEDIUMTEXT NULL`,
		);

		// FIX: the previous `UPDATE ... SET ... FROM ...` form is PostgreSQL/SQL Server
		// syntax and fails on MySQL/MariaDB; multi-table updates must use the JOIN form.
		await queryRunner.query(
			`UPDATE ${tablePrefix}execution_entity
				INNER JOIN ${tablePrefix}execution_data
					ON ${tablePrefix}execution_data.executionId = ${tablePrefix}execution_entity.id
				SET ${tablePrefix}execution_entity.workflowData = ${tablePrefix}execution_data.workflowData,
					${tablePrefix}execution_entity.data = ${tablePrefix}execution_data.data`,
		);

		await queryRunner.query(`DROP TABLE ${tablePrefix}execution_data`);
	}
}

View File

@@ -1,17 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
export class FixExecutionDataType1690000000031 implements IrreversibleMigration {
	/**
	 * SeparateExecutionData for MySQL/MariaDB accidentally narrowed the
	 * `execution_data.data` column type to `TEXT`; restore it to `MEDIUMTEXT`.
	 *
	 * The earlier migration has since been patched to avoid the `TEXT` conversion,
	 * so: users who already ran the faulty version get the column type fixed here,
	 * while users running both migrations in one batch see a no-op (the column is
	 * already `MEDIUMTEXT`).
	 */
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const statement = `ALTER TABLE \`${tablePrefix}execution_data\` MODIFY COLUMN \`data\` MEDIUMTEXT`;
		await queryRunner.query(statement);
	}
}

View File

@@ -1,20 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
export class AddActivatedAtUserSetting1717498465931 implements ReversibleMigration {
	/** Stamps `settings.userActivatedAt` (ms-epoch string) on every already-activated user. */
	async up({ queryRunner, escape }: MigrationContext) {
		const userTable = escape.tableName('user');
		const activatedAt = Date.now();
		// Only touch rows that have settings and were previously flagged as activated.
		await queryRunner.query(
			`UPDATE ${userTable}
			SET settings = JSON_SET(COALESCE(settings, '{}'), '$.userActivatedAt', '${activatedAt}')
			WHERE settings IS NOT NULL AND JSON_EXTRACT(settings, '$.userActivated') = true`,
		);
	}

	/** Strips the `userActivatedAt` key from every non-null settings blob. */
	async down({ queryRunner, escape }: MigrationContext) {
		const userTable = escape.tableName('user');
		await queryRunner.query(
			`UPDATE ${userTable}
			SET settings = JSON_REMOVE(settings, '$.userActivatedAt')
			WHERE settings IS NOT NULL`,
		);
	}
}

View File

@@ -1,32 +0,0 @@
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
export class MigrateTestDefinitionKeyToString1731582748663 implements IrreversibleMigration {
	/**
	 * Converts `test_definition.id` from an auto-increment int into a
	 * varchar(36) primary key, preserving existing ids as their string form.
	 */
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const table = `${tablePrefix}test_definition`;
		const tmpIndex = `TMP_idx_${tablePrefix}test_definition_id`;
		// Order matters: the int id is parked in tmp_id, the string id is built and
		// indexed, and only then is the primary key swapped over.
		//
		// Note: the tail of this sequence (MODIFY / DROP PRIMARY KEY / DROP INDEX /
		// DROP COLUMN) was missing in the initial release and added afterwards.
		// Without it the migration ran successfully but left the table in an
		// inconsistent state (primary-key change unfinished, old key not removed),
		// which prevented the next migration from running on MySQL 8.4.4.
		const statements = [
			`ALTER TABLE ${table} CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
			`ALTER TABLE ${table} ADD COLUMN id varchar(36) NOT NULL;`,
			`UPDATE ${table} SET id = CONVERT(tmp_id, CHAR);`,
			`CREATE INDEX \`${tmpIndex}\` ON ${table} (\`id\`);`,
			`ALTER TABLE ${table} MODIFY COLUMN tmp_id INT NOT NULL;`,
			`ALTER TABLE ${table} DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
			`DROP INDEX \`${tmpIndex}\` ON ${table};`,
			`ALTER TABLE ${table} DROP COLUMN tmp_id;`,
		];
		for (const statement of statements) {
			await queryRunner.query(statement);
		}
	}
}

View File

@@ -1,49 +0,0 @@
import assert from 'node:assert';
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const testMetricEntityTableName = 'test_metric';

export class CreateTestMetricTable1732271325258 implements ReversibleMigration {
	/**
	 * Creates the `test_metric` table. Before doing so it repairs
	 * `test_definition` if MigrateTestDefinitionKeyToString1731582748663 left its
	 * primary-key swap unfinished (tmp_id still primary), which would otherwise
	 * break this migration on MySQL 8.4.4.
	 */
	async up({ schemaBuilder: { createTable, column }, queryRunner, tablePrefix }: MigrationContext) {
		const testDefinitionTable = await queryRunner.getTable(`${tablePrefix}test_definition`);
		assert(testDefinitionTable, 'test_definition table not found');

		const pkSwapUnfinished = testDefinitionTable.primaryColumns.some(
			(c) => c.name === 'tmp_id' && c.isPrimary,
		);
		if (pkSwapUnfinished) {
			// Finish the primary-key change the earlier migration left half-done.
			const repairStatements = [
				`ALTER TABLE ${tablePrefix}test_definition MODIFY COLUMN tmp_id INT NOT NULL;`,
				`ALTER TABLE ${tablePrefix}test_definition DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
				`DROP INDEX \`TMP_idx_${tablePrefix}test_definition_id\` ON ${tablePrefix}test_definition;`,
				`ALTER TABLE ${tablePrefix}test_definition DROP COLUMN tmp_id;`,
			];
			for (const statement of repairStatements) {
				await queryRunner.query(statement);
			}
		}

		await createTable(testMetricEntityTableName)
			.withColumns(
				column('id').varchar(36).primary.notNull,
				column('name').varchar(255).notNull,
				column('testDefinitionId').varchar(36).notNull,
			)
			.withIndexOn('testDefinitionId')
			.withForeignKey('testDefinitionId', {
				tableName: 'test_definition',
				columnName: 'id',
				onDelete: 'CASCADE',
			}).withTimestamps;
	}

	async down({ schemaBuilder: { dropTable } }: MigrationContext) {
		await dropTable(testMetricEntityTableName);
	}
}

View File

@@ -1,28 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const columns = ['totalCases', 'passedCases', 'failedCases'] as const;

// Note: This migration was separated from common after release to remove column check constraints
// because they were causing issues with MySQL
export class AddStatsColumnsToTestRun1736172058779 implements ReversibleMigration {
	/**
	 * Adds nullable integer stats counters to `test_run`.
	 * NULL means "not computed yet": a run may be cancelled or interrupted by an
	 * unexpected error at any moment, so each counter is either NULL or a
	 * non-negative integer.
	 */
	async up({ escape, runQuery }: MigrationContext) {
		const testRunTable = escape.tableName('test_run');
		for (const columnName of columns) {
			await runQuery(`ALTER TABLE ${testRunTable} ADD COLUMN ${escape.columnName(columnName)} INT;`);
		}
	}

	async down({ escape, runQuery }: MigrationContext) {
		const testRunTable = escape.tableName('test_run');
		for (const columnName of columns) {
			await runQuery(`ALTER TABLE ${testRunTable} DROP COLUMN ${escape.columnName(columnName)}`);
		}
	}
}

View File

@@ -1,44 +0,0 @@
import assert from 'node:assert';
import type { MigrationContext, IrreversibleMigration } from '../migration-types';
export class FixTestDefinitionPrimaryKey1739873751194 implements IrreversibleMigration {
	/**
	 * MigrateTestDefinitionKeyToString for MySQL/MariaDB originally shipped
	 * without the statements that finish the primary-key type change and drop the
	 * temporary column, leaving `test_definition` in an inconsistent state.
	 *
	 * This migration detects that state and completes the primary-key swap.
	 * MigrateTestDefinitionKeyToString has been patched to do the swap properly,
	 * and CreateTestMetricTable carries the same repair (the PK issue could stop
	 * it from running on MySQL 8.4.4), so:
	 * - users who already ran both of those migrations get the key fixed here;
	 * - users running all of them in one batch see a no-op, the table having been
	 *   repaired by one of the patched migrations already.
	 */
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const table = await queryRunner.getTable(`${tablePrefix}test_definition`);
		assert(table, 'test_definition table not found');

		const pkSwapUnfinished = table.primaryColumns.some(
			(c) => c.name === 'tmp_id' && c.isPrimary,
		);
		if (!pkSwapUnfinished) return;

		// Finish the primary-key change the earlier migration left half-done.
		const statements = [
			`ALTER TABLE ${tablePrefix}test_definition MODIFY COLUMN tmp_id INT NOT NULL;`,
			`ALTER TABLE ${tablePrefix}test_definition DROP PRIMARY KEY, ADD PRIMARY KEY (\`id\`);`,
			`DROP INDEX \`TMP_idx_${tablePrefix}test_definition_id\` ON ${tablePrefix}test_definition;`,
			`ALTER TABLE ${tablePrefix}test_definition DROP COLUMN tmp_id;`,
		];
		for (const statement of statements) {
			await queryRunner.query(statement);
		}
	}
}

View File

@@ -1,14 +0,0 @@
import type { BaseMigration, MigrationContext } from '../migration-types';
export class UpdateParentFolderIdColumn1740445074052 implements BaseMigration {
	/** Links `workflow_entity.parentFolderId` to `folder.id`, cascading deletes of folders to their workflows. */
	async up({ escape, queryRunner }: MigrationContext) {
		const workflowTable = escape.tableName('workflow_entity');
		const folderTable = escape.tableName('folder');
		const constraint = `ADD CONSTRAINT fk_workflow_parent_folder FOREIGN KEY (${escape.columnName('parentFolderId')}) REFERENCES ${folderTable}(${escape.columnName('id')}) ON DELETE CASCADE`;
		await queryRunner.query(`ALTER TABLE ${workflowTable} ${constraint}`);
	}
}

View File

@@ -1,92 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const VARIABLES_TABLE_NAME = 'variables';
const UNIQUE_PROJECT_KEY_INDEX_NAME = 'variables_project_key_unique';
const UNIQUE_GLOBAL_KEY_INDEX_NAME = 'variables_global_key_unique';
const PROJECT_ID_FOREIGN_KEY_NAME = 'variables_projectId_foreign';

/**
 * Adds a projectId column to the variables table to support project-scoped variables.
 * In MySQL, also adds a generated column (globalKey) to enforce uniqueness
 * for global variables (where projectId is null).
 */
export class AddProjectIdToVariableTable1758794506893 implements ReversibleMigration {
	async up({ schemaBuilder, queryRunner, escape }: MigrationContext) {
		const { addColumns, column, dropIndex, addForeignKey } = schemaBuilder;
		const variablesTable = escape.tableName(VARIABLES_TABLE_NAME);

		// The old "key is globally unique" index no longer holds once variables
		// can be project-scoped.
		await dropIndex(VARIABLES_TABLE_NAME, ['key'], { customIndexName: 'key' });

		await addColumns(VARIABLES_TABLE_NAME, [column('projectId').varchar(36)]);

		// NULLs are pairwise distinct in MySQL unique indexes, so a stored
		// generated column mirrors `key` only for global rows (projectId IS NULL)
		// and gets its own unique index below.
		await queryRunner.query(`
			ALTER TABLE ${variablesTable}
			ADD COLUMN globalKey VARCHAR(255) GENERATED ALWAYS AS (
				CASE WHEN projectId IS NULL THEN \`key\` ELSE NULL END
			) STORED;
		`);

		// The FK is created only after the generated column, to stay within
		// MySQL's limits for foreign keys on tables with stored generated columns:
		// https://dev.mysql.com/doc/refman/8.4/en/create-table-foreign-keys.html
		// "A foreign key constraint on a stored generated column cannot use CASCADE"
		await addForeignKey(
			VARIABLES_TABLE_NAME,
			'projectId',
			['project', 'id'],
			PROJECT_ID_FOREIGN_KEY_NAME,
		);

		// Per-project uniqueness...
		await queryRunner.query(`
			CREATE UNIQUE INDEX ${UNIQUE_PROJECT_KEY_INDEX_NAME}
			ON ${variablesTable} (projectId, \`key\`);
		`);
		// ...and global uniqueness via the generated column.
		await queryRunner.query(`
			CREATE UNIQUE INDEX ${UNIQUE_GLOBAL_KEY_INDEX_NAME}
			ON ${variablesTable} (globalKey);
		`);
	}

	// Raw SQL on purpose: the schemaBuilder helpers read column details from
	// TypeORM metadata, which errors out on the generated column.
	async down({ queryRunner, escape }: MigrationContext) {
		const variablesTable = escape.tableName(VARIABLES_TABLE_NAME);
		const statements = [
			// Project-scoped rows cannot survive without the column (data loss).
			`DELETE FROM ${variablesTable} WHERE projectId IS NOT NULL;`,
			// Drop the generated column and its index.
			`DROP INDEX ${UNIQUE_GLOBAL_KEY_INDEX_NAME} ON ${variablesTable};`,
			`ALTER TABLE ${variablesTable} DROP COLUMN globalKey;`,
			// Drop the projectId column, its foreign key and associated index.
			`ALTER TABLE ${variablesTable} DROP FOREIGN KEY ${PROJECT_ID_FOREIGN_KEY_NAME};`,
			`DROP INDEX ${UNIQUE_PROJECT_KEY_INDEX_NAME} ON ${variablesTable};`,
			`ALTER TABLE ${variablesTable} DROP COLUMN projectId;`,
			// Restore the original global-unique constraint on key.
			`ALTER TABLE ${variablesTable} ADD CONSTRAINT \`key\` UNIQUE (\`key\`);`,
		];
		for (const statement of statements) {
			await queryRunner.query(statement);
		}
	}
}

View File

@@ -1,57 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const table = {
	messages: 'chat_hub_messages',
} as const;

export class DropUnusedChatHubColumns1760965142113 implements ReversibleMigration {
	/**
	 * Replaces the retry-tracking columns (`turnId`, `runIndex`, `state`) on chat
	 * hub messages with a single `status` column.
	 */
	async up({
		runQuery,
		tablePrefix,
		schemaBuilder: { addColumns, dropColumns, column },
	}: MigrationContext) {
		// MySQL needs foreign keys dropped before dropping the column (turnId)
		// they reference, and the FK name depends on the table prefix, so it is
		// looked up at runtime instead of hardcoded.
		const messagesTable = `${tablePrefix}${table.messages}`;
		const foreignKeys: Array<{ name: string }> = await runQuery(
			`SELECT CONSTRAINT_NAME AS name
			FROM information_schema.KEY_COLUMN_USAGE
			WHERE TABLE_SCHEMA = DATABASE()
			AND TABLE_NAME = '${messagesTable}'
			AND COLUMN_NAME = 'turnId'
			AND REFERENCED_TABLE_NAME IS NOT NULL;`,
		);
		// There should only be one, but just in case handle multiple
		for (const fk of foreignKeys) {
			await runQuery(`ALTER TABLE \`${messagesTable}\` DROP FOREIGN KEY \`${fk.name}\`;`);
		}

		await dropColumns(table.messages, ['turnId', 'runIndex', 'state']);
		await addColumns(table.messages, [
			column('status')
				.varchar(16)
				.default("'success'")
				.notNull.comment(
					'ChatHubMessageStatus enum, eg. "success", "error", "running", "cancelled"',
				),
		]);
	}

	async down({
		schemaBuilder: { dropColumns, addColumns, column, addForeignKey },
	}: MigrationContext) {
		await dropColumns(table.messages, ['status']);
		await addColumns(table.messages, [
			column('turnId').uuid,
			column('runIndex')
				.int.notNull.default(0)
				.comment('The nth attempt this message has been generated/retried this turn'),
			column('state')
				.varchar(16)
				.default("'active'")
				.notNull.comment('ChatHubMessageState enum: "active", "superseded", "hidden", "deleted"'),
		]);
		await addForeignKey(table.messages, 'turnId', [table.messages, 'id'], undefined, 'CASCADE');
	}
}

View File

@@ -1,40 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
/**
 * MySQL-specific migration: adds a `versionCounter` column to `workflow_entity`
 * plus a BEFORE UPDATE trigger that bumps it on every row update.
 */
export class AddWorkflowVersionColumn1761047826451 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowTable = `${tablePrefix}workflow_entity`;
		const triggerName = `${tablePrefix}workflow_version_increment`;

		await queryRunner.query(
			`ALTER TABLE \`${workflowTable}\` ADD COLUMN \`versionCounter\` int NOT NULL DEFAULT 1`,
		);

		// BEFORE (not AFTER) UPDATE: the trigger rewrites NEW before the row is
		// stored. It only bumps when the statement did not change versionCounter
		// itself.
		await queryRunner.query(`
			CREATE TRIGGER \`${triggerName}\`
			BEFORE UPDATE ON \`${workflowTable}\`
			FOR EACH ROW
			BEGIN
				IF OLD.versionCounter = NEW.versionCounter THEN
					SET NEW.versionCounter = OLD.versionCounter + 1;
				END IF;
			END;
		`);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowTable = `${tablePrefix}workflow_entity`;
		const triggerName = `${tablePrefix}workflow_version_increment`;
		// Drop the trigger first, then the column it maintains.
		await queryRunner.query(`DROP TRIGGER IF EXISTS \`${triggerName}\``);
		await queryRunner.query(`ALTER TABLE \`${workflowTable}\` DROP COLUMN \`versionCounter\``);
	}
}

View File

@@ -1,15 +0,0 @@
import type { IrreversibleMigration, MigrationContext } from '../migration-types';
/**
 * MySQL/MariaDB-specific migration: widens `workflow_dependency.dependencyInfo`
 * from VARCHAR(255) to JSON.
 */
export class ChangeDependencyInfoToJson1761655473000 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const dependencyTable = `${tablePrefix}workflow_dependency`;
		// MODIFY COLUMN replaces the full column definition, so the COMMENT is
		// restated to keep it.
		const statement = `ALTER TABLE \`${dependencyTable}\` MODIFY COLUMN \`dependencyInfo\` JSON NULL COMMENT 'Additional info about the dependency, interpreted based on type'`;
		await queryRunner.query(statement);
	}
}

View File

@@ -1,32 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '../migration-types';
const table = {
	sessions: 'chat_hub_sessions',
	agents: 'chat_hub_agents',
} as const;

export class AddToolsColumnToChatHubTables1761830340990 implements ReversibleMigration {
	/**
	 * Adds a `tools` JSON column (node definitions available to the agent) to
	 * both chat hub sessions and agents, defaulting existing rows to `[]`.
	 */
	async up({ schemaBuilder: { addColumns, column }, queryRunner, tablePrefix }: MigrationContext) {
		for (const target of [table.sessions, table.agents]) {
			await addColumns(target, [
				column('tools').json.notNull.comment('Tools available to the agent as JSON node definitions'),
			]);
		}
		// Backfill: rows that existed before the column did hold a JSON null.
		await Promise.all(
			[
				`UPDATE \`${tablePrefix}${table.sessions}\` SET \`tools\` = '[]' WHERE JSON_TYPE(\`tools\`) = 'NULL'`,
				`UPDATE \`${tablePrefix}${table.agents}\` SET \`tools\` = '[]' WHERE JSON_TYPE(\`tools\`) = 'NULL'`,
			].map(async (query) => {
				await queryRunner.query(query);
			}),
		);
	}

	async down({ schemaBuilder: { dropColumns } }: MigrationContext) {
		for (const target of [table.sessions, table.agents]) {
			await dropColumns(target, ['tools']);
		}
	}
}

View File

@@ -1,266 +0,0 @@
import { InitialMigration1588157391238 } from './1588157391238-InitialMigration';
import { WebhookModel1592447867632 } from './1592447867632-WebhookModel';
import { CreateIndexStoppedAt1594902918301 } from './1594902918301-CreateIndexStoppedAt';
import { MakeStoppedAtNullable1607431743767 } from './1607431743767-MakeStoppedAtNullable';
import { AddWebhookId1611149998770 } from './1611149998770-AddWebhookId';
import { ChangeDataSize1615306975123 } from './1615306975123-ChangeDataSize';
import { CreateTagEntity1617268711084 } from './1617268711084-CreateTagEntity';
import { ChangeCredentialDataSize1620729500000 } from './1620729500000-ChangeCredentialDataSize';
import { UniqueWorkflowNames1620826335440 } from './1620826335440-UniqueWorkflowNames';
import { CertifyCorrectCollation1623936588000 } from './1623936588000-CertifyCorrectCollation';
import { AddWaitColumnId1626183952959 } from './1626183952959-AddWaitColumn';
import { UpdateWorkflowCredentials1630451444017 } from './1630451444017-UpdateWorkflowCredentials';
import { AddExecutionEntityIndexes1644424784709 } from './1644424784709-AddExecutionEntityIndexes';
import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement';
import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail';
import { CommunityNodes1652254514003 } from './1652254514003-CommunityNodes';
import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings';
import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';
import { IntroducePinData1654090101303 } from './1654090101303-IntroducePinData';
import { AddNodeIds1658932910559 } from './1658932910559-AddNodeIds';
import { AddJsonKeyPinData1659895550980 } from './1659895550980-AddJsonKeyPinData';
import { CreateCredentialsUserRole1660062385367 } from './1660062385367-CreateCredentialsUserRole';
import { CreateWorkflowsEditorRole1663755770894 } from './1663755770894-CreateWorkflowsEditorRole';
import { WorkflowStatistics1664196174002 } from './1664196174002-WorkflowStatistics';
import { CreateCredentialUsageTable1665484192213 } from './1665484192213-CreateCredentialUsageTable';
import { RemoveCredentialUsageTable1665754637026 } from './1665754637026-RemoveCredentialUsageTable';
import { AddWorkflowVersionIdColumn1669739707125 } from './1669739707125-AddWorkflowVersionIdColumn';
import { AddTriggerCountColumn1669823906994 } from './1669823906994-AddTriggerCountColumn';
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { RemoveWorkflowDataLoadedFlag1671726148420 } from './1671726148420-RemoveWorkflowDataLoadedFlag';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { AddStatusToExecutions1674138566000 } from './1674138566000-AddStatusToExecutions';
import { MigrateExecutionStatus1676996103000 } from './1676996103000-MigrateExecutionStatus';
import { UpdateRunningExecutionStatus1677236788851 } from './1677236788851-UpdateRunningExecutionStatus';
import { CreateVariables1677501636753 } from './1677501636753-CreateVariables';
import { CreateExecutionMetadataTable1679416281779 } from './1679416281779-CreateExecutionMetadataTable';
import { AddUserActivatedProperty1681134145996 } from './1681134145996-AddUserActivatedProperty';
import { RemoveSkipOwnerSetup1681134145997 } from './1681134145997-RemoveSkipOwnerSetup';
import { MigrateIntegerKeysToString1690000000001 } from './1690000000001-MigrateIntegerKeysToString';
import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData';
import { FixExecutionDataType1690000000031 } from './1690000000031-FixExecutionDataType';
import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting';
import { MigrateTestDefinitionKeyToString1731582748663 } from './1731582748663-MigrateTestDefinitionKeyToString';
import { CreateTestMetricTable1732271325258 } from './1732271325258-CreateTestMetricTable';
import { AddStatsColumnsToTestRun1736172058779 } from './1736172058779-AddStatsColumnsToTestRun';
import { FixTestDefinitionPrimaryKey1739873751194 } from './1739873751194-FixTestDefinitionPrimaryKey';
import { UpdateParentFolderIdColumn1740445074052 } from './1740445074052-UpdateParentFolderIdColumn';
import { AddProjectIdToVariableTable1758794506893 } from './1758794506893-AddProjectIdToVariableTable';
import { DropUnusedChatHubColumns1760965142113 } from './1760965142113-DropUnusedChatHubColumns';
import { AddWorkflowVersionColumn1761047826451 } from './1761047826451-AddWorkflowVersionColumn';
import { ChangeDependencyInfoToJson1761655473000 } from './1761655473000-ChangeDependencyInfoToJson';
import { AddToolsColumnToChatHubTables1761830340990 } from './1761830340990-AddToolsColumnToChatHubTables';
import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
import { RemoveResetPasswordColumns1690000000030 } from '../common/1690000000030-RemoveResetPasswordColumns';
import { AddMfaColumns1690000000030 } from '../common/1690000000040-AddMfaColumns';
import { CreateWorkflowNameIndex1691088862123 } from '../common/1691088862123-CreateWorkflowNameIndex';
import { CreateWorkflowHistoryTable1692967111175 } from '../common/1692967111175-CreateWorkflowHistoryTable';
import { ExecutionSoftDelete1693491613982 } from '../common/1693491613982-ExecutionSoftDelete';
import { DisallowOrphanExecutions1693554410387 } from '../common/1693554410387-DisallowOrphanExecutions';
import { AddWorkflowMetadata1695128658538 } from '../common/1695128658538-AddWorkflowMetadata';
import { ModifyWorkflowHistoryNodesAndConnections1695829275184 } from '../common/1695829275184-ModifyWorkflowHistoryNodesAndConnections';
import { AddGlobalAdminRole1700571993961 } from '../common/1700571993961-AddGlobalAdminRole';
import { DropRoleMapping1705429061930 } from '../common/1705429061930-DropRoleMapping';
import { RemoveFailedExecutionStatus1711018413374 } from '../common/1711018413374-RemoveFailedExecutionStatus';
import { MoveSshKeysToDatabase1711390882123 } from '../common/1711390882123-MoveSshKeysToDatabase';
import { RemoveNodesAccess1712044305787 } from '../common/1712044305787-RemoveNodesAccess';
import { CreateProject1714133768519 } from '../common/1714133768519-CreateProject';
import { MakeExecutionStatusNonNullable1714133768521 } from '../common/1714133768521-MakeExecutionStatusNonNullable';
import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101653148-AddConstraintToExecutionMetadata';
import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable';
import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices';
import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables';
import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable';
import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { AddProjectIcons1729607673469 } from '../common/1729607673469-AddProjectIcons';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
import { AddDescriptionToTestDefinition1731404028106 } from '../common/1731404028106-AddDescriptionToTestDefinition';
import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable';
import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition';
import { AddManagedColumnToCredentialsTable1734479635324 } from '../common/1734479635324-AddManagedColumnToCredentialsTable';
import { CreateTestCaseExecutionTable1736947513045 } from '../common/1736947513045-CreateTestCaseExecutionTable';
import { AddErrorColumnsToTestRuns1737715421462 } from '../common/1737715421462-AddErrorColumnsToTestRuns';
import { CreateFolderTable1738709609940 } from '../common/1738709609940-CreateFolderTable';
import { CreateAnalyticsTables1739549398681 } from '../common/1739549398681-CreateAnalyticsTables';
import { RenameAnalyticsToInsights1741167584277 } from '../common/1741167584277-RenameAnalyticsToInsights';
import { AddScopesColumnToApiKeys1742918400000 } from '../common/1742918400000-AddScopesColumnToApiKeys';
import { ClearEvaluation1745322634000 } from '../common/1745322634000-CleanEvaluations';
import { AddWorkflowStatisticsRootCount1745587087521 } from '../common/1745587087521-AddWorkflowStatisticsRootCount';
import { AddWorkflowArchivedColumn1745934666076 } from '../common/1745934666076-AddWorkflowArchivedColumn';
import { DropRoleTable1745934666077 } from '../common/1745934666077-DropRoleTable';
import { AddProjectDescriptionColumn1747824239000 } from '../common/1747824239000-AddProjectDescriptionColumn';
import { AddLastActiveAtColumnToUser1750252139166 } from '../common/1750252139166-AddLastActiveAtColumnToUser';
import { AddScopeTables1750252139166 } from '../common/1750252139166-AddScopeTables';
import { AddRolesTables1750252139167 } from '../common/1750252139167-AddRolesTables';
import { LinkRoleToUserTable1750252139168 } from '../common/1750252139168-LinkRoleToUserTable';
import { RemoveOldRoleColumn1750252139170 } from '../common/1750252139170-RemoveOldRoleColumn';
import { AddInputsOutputsToTestCaseExecution1752669793000 } from '../common/1752669793000-AddInputsOutputsToTestCaseExecution';
import { LinkRoleToProjectRelationTable1753953244168 } from '../common/1753953244168-LinkRoleToProjectRelationTable';
import { CreateDataStoreTables1754475614601 } from '../common/1754475614601-CreateDataStoreTables';
import { ReplaceDataStoreTablesWithDataTables1754475614602 } from '../common/1754475614602-ReplaceDataStoreTablesWithDataTables';
import { AddTimestampsToRoleAndRoleIndexes1756906557570 } from '../common/1756906557570-AddTimestampsToRoleAndRoleIndexes';
import { AddAudienceColumnToApiKeys1758731786132 } from '../common/1758731786132-AddAudienceColumnToApiKey';
import { ChangeValueTypesForInsights1759399811000 } from '../common/1759399811000-ChangeValueTypesForInsights';
import { CreateChatHubTables1760019379982 } from '../common/1760019379982-CreateChatHubTables';
import { CreateChatHubAgentTable1760020000000 } from '../common/1760020000000-CreateChatHubAgentTable';
import { UniqueRoleNames1760020838000 } from '../common/1760020838000-UniqueRoleNames';
import { CreateOAuthEntities1760116750277 } from '../common/1760116750277-CreateOAuthEntities';
import { CreateWorkflowDependencyTable1760314000000 } from '../common/1760314000000-CreateWorkflowDependencyTable';
import { AddAttachmentsToChatHubMessages1761773155024 } from '../common/1761773155024-AddAttachmentsToChatHubMessages';
import { AddWorkflowDescriptionColumn1762177736257 } from '../common/1762177736257-AddWorkflowDescriptionColumn';
import { BackfillMissingWorkflowHistoryRecords1762763704614 } from '../common/1762763704614-BackfillMissingWorkflowHistoryRecords';
import { AddIsGlobalColumnToCredentialsTable1762771954619 } from '../common/1762771954619-IsGlobalGlobalColumnToCredentialsTable';
import { AddWorkflowHistoryAutoSaveFields1762847206508 } from '../common/1762847206508-AddWorkflowHistoryAutoSaveFields';
import { AddActiveVersionIdColumn1763047800000 } from '../common/1763047800000-AddActiveVersionIdColumn';
import { ActivateExecuteWorkflowTriggerWorkflows1763048000000 } from '../common/1763048000000-ActivateExecuteWorkflowTriggerWorkflows';
import { ChangeOAuthStateColumnToUnboundedVarchar1763572724000 } from '../common/1763572724000-ChangeOAuthStateColumnToUnboundedVarchar';
import { CreateBinaryDataTable1763716655000 } from '../common/1763716655000-CreateBinaryDataTable';
import { CreateWorkflowPublishHistoryTable1764167920585 } from '../common/1764167920585-CreateWorkflowPublishHistoryTable';
import { AddCreatorIdToProjectTable1764276827837 } from '../common/1764276827837-AddCreatorIdToProjectTable';
import { CreateDynamicCredentialResolverTable1764682447000 } from '../common/1764682447000-CreateCredentialResolverTable';
import { AddDynamicCredentialEntryTable1764689388394 } from '../common/1764689388394-AddDynamicCredentialEntryTable';
import { BackfillMissingWorkflowHistoryRecords1765448186933 } from '../common/1765448186933-BackfillMissingWorkflowHistoryRecords';
import { AddResolvableFieldsToCredentials1765459448000 } from '../common/1765459448000-AddResolvableFieldsToCredentials';
import { AddIconToAgentTable1765788427674 } from '../common/1765788427674-AddIconToAgentTable';
import { AddAgentIdForeignKeys1765886667897 } from '../common/1765886667897-AddAgentIdForeignKeys';
import { AddWorkflowVersionIdToExecutionData1765892199653 } from '../common/1765892199653-AddVersionIdToExecutionData';
import { AddPublishedVersionIdToWorkflowDependency1769000000000 } from '../common/1769000000000-AddPublishedVersionIdToWorkflowDependency';
import type { Migration } from '../migration-types';
/**
 * Ordered list of migrations for the MySQL/MariaDB driver.
 *
 * The order of this array is significant: the migration runner applies
 * entries in array order, so entries must never be reordered or removed —
 * only appended. Note that the numeric suffix on each class name is a
 * creation timestamp and is NOT guaranteed to be monotonically increasing
 * within the array (e.g. WorkflowStatistics1664196174002 appears after
 * later-stamped entries), so the array order is the single source of truth.
 */
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
AddWebhookId1611149998770,
MakeStoppedAtNullable1607431743767,
ChangeDataSize1615306975123,
ChangeCredentialDataSize1620729500000,
CreateTagEntity1617268711084,
UniqueWorkflowNames1620826335440,
CertifyCorrectCollation1623936588000,
AddWaitColumnId1626183952959,
UpdateWorkflowCredentials1630451444017,
AddExecutionEntityIndexes1644424784709,
CreateUserManagement1646992772331,
LowerCaseUserEmail1648740597343,
AddUserSettings1652367743993,
CommunityNodes1652254514003,
AddAPIKeyColumn1652905585850,
IntroducePinData1654090101303,
AddNodeIds1658932910559,
AddJsonKeyPinData1659895550980,
CreateCredentialsUserRole1660062385367,
CreateWorkflowsEditorRole1663755770894,
CreateCredentialUsageTable1665484192213,
RemoveCredentialUsageTable1665754637026,
AddWorkflowVersionIdColumn1669739707125,
WorkflowStatistics1664196174002,
AddTriggerCountColumn1669823906994,
RemoveWorkflowDataLoadedFlag1671726148420,
MessageEventBusDestinations1671535397530,
DeleteExecutionsWithWorkflows1673268682475,
CreateLdapEntities1674509946020,
PurgeInvalidWorkflowConnections1675940580449,
AddStatusToExecutions1674138566000,
MigrateExecutionStatus1676996103000,
UpdateRunningExecutionStatus1677236788851,
CreateExecutionMetadataTable1679416281779,
CreateVariables1677501636753,
AddUserActivatedProperty1681134145996,
MigrateIntegerKeysToString1690000000001,
SeparateExecutionData1690000000030,
FixExecutionDataType1690000000031,
RemoveSkipOwnerSetup1681134145997,
RemoveResetPasswordColumns1690000000030,
CreateWorkflowNameIndex1691088862123,
AddMfaColumns1690000000030,
CreateWorkflowHistoryTable1692967111175,
DisallowOrphanExecutions1693554410387,
ExecutionSoftDelete1693491613982,
AddWorkflowMetadata1695128658538,
ModifyWorkflowHistoryNodesAndConnections1695829275184,
AddGlobalAdminRole1700571993961,
DropRoleMapping1705429061930,
RemoveFailedExecutionStatus1711018413374,
MoveSshKeysToDatabase1711390882123,
RemoveNodesAccess1712044305787,
CreateProject1714133768519,
MakeExecutionStatusNonNullable1714133768521,
AddActivatedAtUserSetting1717498465931,
AddConstraintToExecutionMetadata1720101653148,
CreateInvalidAuthTokenTable1723627610222,
RefactorExecutionIndices1723796243146,
CreateAnnotationTables1724753530828,
AddApiKeysTable1724951148974,
SeparateExecutionCreationFromStart1727427440136,
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
AddDescriptionToTestDefinition1731404028106,
MigrateTestDefinitionKeyToString1731582748663,
CreateTestMetricTable1732271325258,
CreateTestRun1732549866705,
AddMockedNodesColumnToTestDefinition1733133775640,
AddManagedColumnToCredentialsTable1734479635324,
AddProjectIcons1729607673469,
AddStatsColumnsToTestRun1736172058779,
CreateTestCaseExecutionTable1736947513045,
AddErrorColumnsToTestRuns1737715421462,
CreateFolderTable1738709609940,
FixTestDefinitionPrimaryKey1739873751194,
CreateAnalyticsTables1739549398681,
UpdateParentFolderIdColumn1740445074052,
RenameAnalyticsToInsights1741167584277,
AddScopesColumnToApiKeys1742918400000,
AddWorkflowStatisticsRootCount1745587087521,
AddWorkflowArchivedColumn1745934666076,
DropRoleTable1745934666077,
ClearEvaluation1745322634000,
AddProjectDescriptionColumn1747824239000,
AddLastActiveAtColumnToUser1750252139166,
AddScopeTables1750252139166,
AddRolesTables1750252139167,
LinkRoleToUserTable1750252139168,
AddInputsOutputsToTestCaseExecution1752669793000,
CreateDataStoreTables1754475614601,
RemoveOldRoleColumn1750252139170,
ReplaceDataStoreTablesWithDataTables1754475614602,
LinkRoleToProjectRelationTable1753953244168,
AddTimestampsToRoleAndRoleIndexes1756906557570,
AddProjectIdToVariableTable1758794506893,
AddAudienceColumnToApiKeys1758731786132,
ChangeValueTypesForInsights1759399811000,
CreateChatHubTables1760019379982,
CreateChatHubAgentTable1760020000000,
UniqueRoleNames1760020838000,
CreateWorkflowDependencyTable1760314000000,
DropUnusedChatHubColumns1760965142113,
AddWorkflowVersionColumn1761047826451,
ChangeDependencyInfoToJson1761655473000,
AddWorkflowDescriptionColumn1762177736257,
CreateOAuthEntities1760116750277,
BackfillMissingWorkflowHistoryRecords1762763704614,
AddIsGlobalColumnToCredentialsTable1762771954619,
AddWorkflowHistoryAutoSaveFields1762847206508,
AddToolsColumnToChatHubTables1761830340990,
ChangeOAuthStateColumnToUnboundedVarchar1763572724000,
AddAttachmentsToChatHubMessages1761773155024,
AddActiveVersionIdColumn1763047800000,
CreateBinaryDataTable1763716655000,
CreateWorkflowPublishHistoryTable1764167920585,
ActivateExecuteWorkflowTriggerWorkflows1763048000000,
AddCreatorIdToProjectTable1764276827837,
CreateDynamicCredentialResolverTable1764682447000,
AddDynamicCredentialEntryTable1764689388394,
BackfillMissingWorkflowHistoryRecords1765448186933,
AddResolvableFieldsToCredentials1765459448000,
AddIconToAgentTable1765788427674,
AddAgentIdForeignKeys1765886667897,
AddWorkflowVersionIdToExecutionData1765892199653,
AddPublishedVersionIdToWorkflowDependency1769000000000,
];

View File

@@ -414,34 +414,6 @@ describe('WorkflowRepository', () => {
);
});
// Verifies that getMany(), when filtering by trigger node types on a MySQL
// instance, joins the activeVersion relation and matches the node-type
// filter against COALESCE(activeVersion.nodes, workflow.nodes), so the
// published version's nodes take precedence over the draft workflow's nodes.
it('should left join activeVersion with addSelect and use COALESCE for MySQL', async () => {
// Build a repository wired to a mocked GlobalConfig reporting mysqldb,
// so the MySQL-specific query branch is exercised.
const mysqlConfig = mockInstance(GlobalConfig, {
database: { type: 'mysqldb' },
});
const mysqlWorkflowRepository = new WorkflowRepository(
entityManager.connection,
mysqlConfig,
folderRepository,
workflowHistoryRepository,
);
// Intercept the query builder so the generated join/where calls can be asserted.
jest.spyOn(mysqlWorkflowRepository, 'createQueryBuilder').mockReturnValue(queryBuilder);
const workflowIds = ['workflow1'];
const options = {
filter: { triggerNodeTypes: ['n8n-nodes-base.executeWorkflowTrigger'] },
};
await mysqlWorkflowRepository.getMany(workflowIds, options);
expect(queryBuilder.leftJoin).toHaveBeenCalledWith('workflow.activeVersion', 'activeVersion');
expect(queryBuilder.addSelect).toHaveBeenCalledWith('activeVersion.versionId');
// Should use COALESCE to check activeVersion.nodes first, falling back to workflow.nodes
expect(queryBuilder.andWhere).toHaveBeenCalledWith(
'(COALESCE(activeVersion.nodes, workflow.nodes) LIKE :triggerNodeType0)',
{ triggerNodeType0: '%n8n-nodes-base.executeWorkflowTrigger%' },
);
});
it('should not join activeVersion again if already joined', async () => {
// Simulate activeVersion already being joined
Object.defineProperty(queryBuilder, 'expressionMap', {

View File

@@ -1109,8 +1109,8 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
// Sort the final result after the joins again, because there is no
// guarantee that the order is unchanged after performing joins. Especially
// postgres and MySQL returned to the natural order again, listing
// executions in the order they were created.
// postgres returned to the natural order again, listing executions in the
// order they were created.
if (query.kind === 'range') {
if (query.order?.startedAt === 'DESC') {
const table = qb.escape('e');

View File

@@ -149,7 +149,7 @@ export class WorkflowDependencyRepository extends Repository<WorkflowDependency>
// so the prepareTransactionForSqlite step ensures no concurrent writes happen.
return await tx.existsBy(WorkflowDependency, whereConditions);
}
// For Postgres and MySQL we lock on the workflow row, and only then check the dependency table.
// For Postgres we lock on the workflow row, and only then check the dependency table.
// This prevents a race between two concurrent updates.
const placeholder = this.databaseConfig.type === 'postgresdb' ? '$1' : '?';
const tableName = this.getTableName('workflow_entity');

View File

@@ -2,6 +2,7 @@ import { GlobalConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import { PROJECT_OWNER_ROLE_SLUG } from '@n8n/permissions';
import { DataSource, QueryFailedError, Repository } from '@n8n/typeorm';
import assert from 'node:assert';
import {
ProjectRelation,
@@ -106,22 +107,9 @@ export class WorkflowStatisticsRepository extends Repository<WorkflowStatistics>
)) as Array<{ count: string | number }>;
return Number(queryResult[0].count) === 1 ? 'insert' : 'update';
} else {
const queryResult = (await this.query(
`INSERT INTO ${escapedTableName} (count, rootCount, name, workflowId, workflowName, latestEvent)
VALUES (1, ?, ?, ?, ?, NOW())
ON DUPLICATE KEY
UPDATE
count = count + 1,
rootCount = rootCount + ?,
workflowName = VALUES(workflowName),
latestEvent = NOW();`,
[rootCountIncrement, eventName, workflowId, workflowName ?? null, rootCountIncrement],
)) as { affectedRows: number };
// MySQL returns 2 affected rows on update
return queryResult.affectedRows === 1 ? 'insert' : 'update';
}
assert.fail('Unknown database type');
} catch (error) {
console.log('error', error);
if (error instanceof QueryFailedError) return 'failed';

View File

@@ -101,9 +101,9 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
const dbType = this.globalConfig.database.type;
if (['postgresdb'].includes(dbType)) {
if (dbType === 'postgresdb') {
qb.where("workflow.settings ->> 'errorWorkflow' IS NOT NULL");
} else if (['mysqldb', 'mariadb', 'sqlite'].includes(dbType)) {
} else if (dbType === 'sqlite') {
qb.where("JSON_EXTRACT(workflow.settings, '$.errorWorkflow') IS NOT NULL");
}
@@ -137,17 +137,11 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
async updateWorkflowTriggerCount(id: string, triggerCount: number): Promise<UpdateResult> {
const qb = this.createQueryBuilder('workflow');
const dbType = this.globalConfig.database.type;
return await qb
.update()
.set({
triggerCount,
updatedAt: () => {
if (['mysqldb', 'mariadb'].includes(dbType)) {
return 'updatedAt';
}
return '"updatedAt"';
},
updatedAt: () => '"updatedAt"',
})
.where('id = :id', { id })
.execute();
@@ -501,14 +495,10 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
if (filter.availableInMCP) {
// When filtering for true, only match explicit true values
if (['postgresdb'].includes(dbType)) {
if (dbType === 'postgresdb') {
qb.andWhere("workflow.settings ->> 'availableInMCP' = :availableInMCP", {
availableInMCP: 'true',
});
} else if (['mysqldb', 'mariadb'].includes(dbType)) {
qb.andWhere("JSON_EXTRACT(workflow.settings, '$.availableInMCP') = :availableInMCP", {
availableInMCP: true,
});
} else if (dbType === 'sqlite') {
qb.andWhere("JSON_EXTRACT(workflow.settings, '$.availableInMCP') = :availableInMCP", {
availableInMCP: 1, // SQLite stores booleans as 0/1
@@ -516,16 +506,11 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
}
} else {
// When filtering for false, match explicit false OR null/undefined (field not set)
if (['postgresdb'].includes(dbType)) {
if (dbType === 'postgresdb') {
qb.andWhere(
"(workflow.settings ->> 'availableInMCP' = :availableInMCP OR workflow.settings ->> 'availableInMCP' IS NULL)",
{ availableInMCP: 'false' },
);
} else if (['mysqldb', 'mariadb'].includes(dbType)) {
qb.andWhere(
"(JSON_EXTRACT(workflow.settings, '$.availableInMCP') = :availableInMCP OR JSON_EXTRACT(workflow.settings, '$.availableInMCP') IS NULL)",
{ availableInMCP: false },
);
} else if (dbType === 'sqlite') {
qb.andWhere(
"(JSON_EXTRACT(workflow.settings, '$.availableInMCP') = :availableInMCP OR JSON_EXTRACT(workflow.settings, '$.availableInMCP') IS NULL)",
@@ -568,7 +553,7 @@ export class WorkflowRepository extends Repository<WorkflowEntity> {
`COALESCE("activeVersion"."nodes"::text, "workflow"."nodes"::text) LIKE :${paramName}`,
);
} else {
// SQLite and MySQL store nodes as text
// SQLite stores nodes as text
conditions.push(`COALESCE(activeVersion.nodes, workflow.nodes) LIKE :${paramName}`);
}
});

View File

@@ -28,21 +28,5 @@ describe('WorkflowRepository', () => {
expect(whereClause).toContain(expectedInQuery);
expect(parameters).toEqual(expectedParameters);
});
// Verifies the MySQL branch of buildWorkflowsByNodesQuery: each node type
// becomes its own JSON_SEARCH(...) IS NOT NULL condition (MySQL has no
// direct array-membership operator), OR-ed together, with one positional
// parameter (:nodeType0, :nodeType1, ...) per node type.
it('should return the correct WHERE clause and parameters for mysqldb', () => {
const nodeTypes = ['HTTP Request', 'Set'];
const expectedWhereClause =
"(JSON_SEARCH(JSON_EXTRACT(workflow.nodes, '$[*].type'), 'one', :nodeType0) IS NOT NULL OR JSON_SEARCH(JSON_EXTRACT(workflow.nodes, '$[*].type'), 'one', :nodeType1) IS NOT NULL)";
// The original nodeTypes array is always included alongside the per-type params.
const expectedParameters = {
nodeType0: 'HTTP Request',
nodeType1: 'Set',
nodeTypes,
};
const { whereClause, parameters } = buildWorkflowsByNodesQuery(nodeTypes, 'mysqldb');
expect(whereClause).toEqual(expectedWhereClause);
expect(parameters).toEqual(expectedParameters);
});
});
});

View File

@@ -1,10 +1,7 @@
/**
* Builds the WHERE clause and parameters for a query to find workflows by node types
*/
export function buildWorkflowsByNodesQuery(
nodeTypes: string[],
dbType: 'postgresdb' | 'mysqldb' | 'mariadb' | 'sqlite',
) {
export function buildWorkflowsByNodesQuery(nodeTypes: string[], dbType: 'postgresdb' | 'sqlite') {
let whereClause: string;
const parameters: Record<string, string | string[]> = { nodeTypes };
@@ -17,22 +14,6 @@ export function buildWorkflowsByNodesQuery(
WHERE node->>'type' = ANY(:nodeTypes)
)`;
break;
case 'mysqldb':
case 'mariadb': {
const conditions = nodeTypes
.map(
(_, i) =>
`JSON_SEARCH(JSON_EXTRACT(workflow.nodes, '$[*].type'), 'one', :nodeType${i}) IS NOT NULL`,
)
.join(' OR ');
whereClause = `(${conditions})`;
nodeTypes.forEach((nodeType, index) => {
parameters[`nodeType${index}`] = nodeType;
});
break;
}
case 'sqlite': {
const conditions = nodeTypes
.map(

View File

@@ -26,7 +26,7 @@ export const objectRetriever: ValueTransformer = {
/**
* Transformer for sqlite JSON columns to mimic JSON-as-object behavior
* from Postgres and MySQL.
* from Postgres.
*/
const jsonColumn: ValueTransformer = {
to: (value: object): string | object =>

View File

@@ -28,13 +28,11 @@
"test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.integration.js --no-coverage",
"test:postgres:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage",
"test:mariadb": "echo true",
"test:mysql": "echo true",
"test:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest",
"test:dev:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest --watch",
"test:sqlite:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest --config=jest.config.integration.js",
"test:postgres:win": "set N8N_LOG_LEVEL=silent&& set DB_TYPE=postgresdb&& set DB_POSTGRESDB_SCHEMA=alt_schema&& set DB_TABLE_PREFIX=test_&& jest --config=jest.config.integration.js --no-coverage",
"test:mariadb:win": "echo true",
"test:mysql:win": "echo true",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\""
},
"bin": {
@@ -152,7 +150,6 @@
"ldapts": "4.2.6",
"lodash": "catalog:",
"luxon": "catalog:",
"mysql2": "catalog:",
"n8n-core": "workspace:*",
"n8n-editor-ui": "workspace:*",
"n8n-nodes-base": "workspace:*",

View File

@@ -79,7 +79,7 @@ describe('ExecutionRepository', () => {
});
describe('updateExistingExecution', () => {
test.each(['sqlite', 'postgresdb', 'mysqldb'] as const)(
test.each(['sqlite', 'postgresdb'] as const)(
'should update execution and data in transaction on %s',
async (dbType) => {
globalConfig.database.type = dbType;

View File

@@ -627,7 +627,7 @@ export class ExecutionService {
['execution'],
);
// Upsert behavior differs for Postgres, MySQL and sqlite,
// Upsert behavior differs for Postgres and sqlite,
// so we need to fetch the annotation to get the ID
const annotation = await this.executionAnnotationRepository.findOneOrFail({
where: {

View File

@@ -1,45 +0,0 @@
import { mockInstance } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import { RemovedDatabaseTypesRule } from '../removed-database-types.rule';
describe('RemovedDatabaseTypesRule', () => {
	let globalConfig: GlobalConfig;
	let rule: RemovedDatabaseTypesRule;

	beforeEach(() => {
		// Fresh mocked config + rule per test so database.type mutations don't leak.
		globalConfig = mockInstance(GlobalConfig);
		rule = new RemovedDatabaseTypesRule(globalConfig);
	});

	describe('detect()', () => {
		it('should not be affected when using PostgreSQL', async () => {
			globalConfig.database.type = 'postgresdb';

			const { isAffected, instanceIssues } = await rule.detect();

			expect(isAffected).toBe(false);
			expect(instanceIssues).toHaveLength(0);
		});

		it('should be affected when using MySQL', async () => {
			globalConfig.database.type = 'mysqldb';

			const { isAffected, instanceIssues } = await rule.detect();

			// Exactly one instance-level issue is reported for the removed type.
			expect(isAffected).toBe(true);
			expect(instanceIssues).toHaveLength(1);
			expect(instanceIssues[0].title).toBe('MySQL database type removed');
		});

		it('should be affected when using MariaDB', async () => {
			globalConfig.database.type = 'mariadb';

			const { isAffected, instanceIssues } = await rule.detect();

			expect(isAffected).toBe(true);
			expect(instanceIssues).toHaveLength(1);
			expect(instanceIssues[0].title).toBe('MariaDB database type removed');
		});
	});
});

View File

@@ -7,7 +7,6 @@ import { OAuthCallbackAuthRule } from './oauth-callback-auth.rule';
import { ProcessEnvAccessRule } from './process-env-access.rule';
import { PyodideRemovedRule } from './pyodide-removed.rule';
import { QueueWorkerMaxStalledCountRule } from './queue-worker-max-stalled-count.rule';
import { RemovedDatabaseTypesRule } from './removed-database-types.rule';
import { RemovedNodesRule } from './removed-nodes.rule';
import { SettingsFilePermissionsRule } from './settings-file-permissions.rule';
import { TaskRunnerDockerImageRule } from './task-runner-docker-image.rule';
@@ -33,7 +32,6 @@ const v2Rules = [
WorkflowHooksDeprecatedRule,
QueueWorkerMaxStalledCountRule,
TunnelOptionRule,
RemovedDatabaseTypesRule,
SettingsFilePermissionsRule,
TaskRunnersRule,
TaskRunnerDockerImageRule,

View File

@@ -1,56 +0,0 @@
import { GlobalConfig } from '@n8n/config';
import { Service } from '@n8n/di';
import type {
BreakingChangeRuleMetadata,
IBreakingChangeInstanceRule,
InstanceDetectionReport,
} from '../../types';
import { BreakingChangeCategory } from '../../types';
/**
 * v2 breaking-change rule flagging instances still configured with the
 * MySQL or MariaDB database types, which were removed in v2 and cause
 * n8n to fail on startup.
 */
@Service()
export class RemovedDatabaseTypesRule implements IBreakingChangeInstanceRule {
	id: string = 'removed-database-types-v2';

	constructor(private readonly globalConfig: GlobalConfig) {}

	/** Static metadata describing this rule for the breaking-change report. */
	getMetadata(): BreakingChangeRuleMetadata {
		return {
			version: 'v2',
			title: 'MySQL/MariaDB database types removed',
			description:
				'MySQL and MariaDB database types have been completely removed and will cause n8n to fail on startup',
			category: BreakingChangeCategory.database,
			severity: 'critical',
			documentationUrl: 'https://docs.n8n.io/2-0-breaking-changes/#drop-mysqlmariadb-support',
		};
	}

	/**
	 * Reports the instance as affected iff the configured database type is
	 * one of the removed types ('mysqldb' or 'mariadb'), attaching a single
	 * error-level issue and a migration recommendation.
	 */
	async detect(): Promise<InstanceDetectionReport> {
		const dbType = this.globalConfig.database.type;

		// Guard clause: any other database type is unaffected.
		if (dbType !== 'mysqldb' && dbType !== 'mariadb') {
			return { isAffected: false, instanceIssues: [], recommendations: [] };
		}

		const displayName = dbType === 'mysqldb' ? 'MySQL' : 'MariaDB';

		return {
			isAffected: true,
			instanceIssues: [
				{
					title: `${displayName} database type removed`,
					description:
						'MySQL and MariaDB database types have been completely removed in v2. n8n will fail to start with this database configuration.',
					level: 'error',
				},
			],
			recommendations: [
				{
					action: 'Migrate to PostgreSQL or SQLite before upgrading',
					description:
						'You must migrate your database to PostgreSQL or SQLite before upgrading to v2. Use the database migration tool if available, or export/import your workflows and credentials.',
				},
			],
		};
	}
}

View File

@@ -182,7 +182,7 @@ describe('DataTableColumnRepository', () => {
it('should call DDL service with correct database type', async () => {
// Arrange
const newName = 'new_valid_name';
const dbTypes = ['postgres', 'mysql', 'sqlite'] as const;
const dbTypes = ['postgres', 'sqlite'] as const;
for (const dbType of dbTypes) {
mockEntityManager.existsBy.mockResolvedValue(false);

Some files were not shown because too many files have changed in this diff Show More