diff --git a/.github/workflows/test-db.yml b/.github/workflows/test-db.yml index 2f98e460812..6344c67de8c 100644 --- a/.github/workflows/test-db.yml +++ b/.github/workflows/test-db.yml @@ -11,6 +11,7 @@ on: - packages/cli/src/modules/**/*.entity.ts - packages/cli/src/modules/**/*.repository.ts - packages/cli/test/integration/** + - packages/cli/test/migration/** - packages/cli/test/shared/db/** - packages/@n8n/db/** - packages/cli/**/__tests__/** @@ -53,6 +54,10 @@ jobs: working-directory: packages/cli run: pnpm test:sqlite + - name: Test SQLite Pooled Migrations + working-directory: packages/cli + run: pnpm test:sqlite:migrations:tc + postgres: name: Postgres needs: build @@ -69,9 +74,13 @@ jobs: - name: Pre-pull Test Container Images run: npx tsx packages/testing/containers/pull-test-images.ts || true - - name: Test Postgres + - name: Test Postgres Integration working-directory: packages/cli - run: pnpm test:postgres:tc + run: pnpm test:postgres:integration:tc + + - name: Test Postgres Migrations + working-directory: packages/cli + run: pnpm test:postgres:migrations:tc notify-on-failure: name: Notify Slack on failure diff --git a/packages/@n8n/backend-test-utils/src/migration-test-helpers.ts b/packages/@n8n/backend-test-utils/src/migration-test-helpers.ts index 6801e5a6214..8c0bb0fd9b8 100644 --- a/packages/@n8n/backend-test-utils/src/migration-test-helpers.ts +++ b/packages/@n8n/backend-test-utils/src/migration-test-helpers.ts @@ -1,7 +1,7 @@ import { GlobalConfig } from '@n8n/config'; import { type DatabaseType, DbConnection, type Migration } from '@n8n/db'; import { Container } from '@n8n/di'; -import { DataSource, type QueryRunner } from '@n8n/typeorm'; +import { DataSource, type ObjectLiteral, type QueryRunner } from '@n8n/typeorm'; import { UnexpectedError } from 'n8n-workflow'; async function reinitializeDataConnection(): Promise<void> { @@ -10,6 +10,21 @@ async function reinitializeDataConnection(): Promise<void> { await dbConnection.init(); } +/** + * Get the properly qualified migrations table name for the current database. + */ +function getMigrationsTableName(): string { + const globalConfig = Container.get(GlobalConfig); + const dbType = globalConfig.database.type; + const tablePrefix = globalConfig.database.tablePrefix; + + if (dbType === 'postgresdb') { + const schema = globalConfig.database.postgresdb.schema; + return `${schema}."${tablePrefix}migrations"`; + } + return `"${tablePrefix}migrations"`; +} + /** * Test migration context with database-specific helpers (similar to MigrationContext). */ @@ -24,6 +39,7 @@ export interface TestMigrationContext { tableName(name: string): string; indexName(name: string): string; }; + runQuery: <T = unknown>(sql: string, namedParameters?: ObjectLiteral) => Promise<T>; } /** @@ -47,6 +63,32 @@ export function createTestMigrationContext(dataSource: DataSource): TestMigratio tableName: (name) => queryRunner.connection.driver.escape(`${tablePrefix}${name}`), indexName: (name) => queryRunner.connection.driver.escape(`IDX_${tablePrefix}${name}`), }, + runQuery: async <T = unknown>(sql: string, namedParameters?: ObjectLiteral) => { + if (namedParameters) { + if (dbType === 'postgresdb') { + // For PostgreSQL, convert named parameters to positional ($1, $2, etc.) + // This handles JSON columns properly which don't work well with TypeORM's escapeQueryWithParameters + // Use negative lookbehind to avoid matching PostgreSQL's :: cast operator + let paramIndex = 1; + const paramValues: unknown[] = []; + const convertedSql = sql.replace(/(?<!:):(\w+)/g, (_, paramName) =>
{ + paramValues.push(namedParameters[paramName] as unknown); + return `$${paramIndex++}`; + }); + return (await queryRunner.query(convertedSql, paramValues)) as T; + } else { + // For MySQL/SQLite, use TypeORM's escapeQueryWithParameters + const [query, parameters] = queryRunner.connection.driver.escapeQueryWithParameters( + sql, + namedParameters, + {}, + ); + return (await queryRunner.query(query, parameters)) as T; + } + } else { + return (await queryRunner.query(sql)) as T; + } + }, }; } @@ -97,7 +139,7 @@ export async function undoLastSingleMigration(): Promise<void> { // Get the last executed migration from the migrations table const executedMigrations = await dataSource.query<Array<{ name: string }>>( - 'SELECT * FROM migrations ORDER BY timestamp DESC LIMIT 1', + `SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp DESC LIMIT 1`, ); if (executedMigrations.length === 0) { diff --git a/packages/cli/jest.config.integration.js b/packages/cli/jest.config.integration.js index a9576058864..1b486b842c8 100644 --- a/packages/cli/jest.config.integration.js +++ b/packages/cli/jest.config.integration.js @@ -14,8 +14,12 @@ module.exports = { coveragePathIgnorePatterns: ['/src/databases/migrations/'], testTimeout: 10_000, prettierPath: null, - // Run integration tests from both test/integration and src/ directories + // Run integration tests from test/integration, test/migration and src/ directories testRegex: undefined, // Override base config testRegex - testMatch: ['<rootDir>/test/integration/**/*.test.ts', '<rootDir>/src/**/*.integration.test.ts'], + testMatch: [ + '<rootDir>/test/integration/**/*.test.ts', + '<rootDir>/test/migration/**/*.test.ts', + '<rootDir>/src/**/*.integration.test.ts', + ], testPathIgnorePatterns: ['/dist/', '/node_modules/'], }; diff --git a/packages/cli/jest.config.migration.js b/packages/cli/jest.config.migration.js new file mode 100644 index 00000000000..5eeb150415a --- /dev/null +++ b/packages/cli/jest.config.migration.js @@ -0,0 +1,8 @@ +/** @type {import('jest').Config} */ +module.exports = { + ...require('./jest.config.integration.js'), + // Override testMatch to only run migration tests + testMatch: ['<rootDir>/test/migration/**/*.test.ts'], + // Run migration tests sequentially to avoid database conflicts + maxWorkers: 1, +}; diff --git a/packages/cli/jest.config.migration.testcontainers.js b/packages/cli/jest.config.migration.testcontainers.js new file mode 100644 index 00000000000..df4509fe534 --- /dev/null +++ b/packages/cli/jest.config.migration.testcontainers.js @@ -0,0 +1,23 @@ +/** + * Jest config for migration tests using testcontainers. + * Migration tests must run sequentially (maxWorkers: 1) because they: + * - Share the same database instance + * - Perform rollbacks that affect subsequent tests + * - Clear and reinitialize the database in beforeEach hooks + * + * Usage: pnpm test:postgres:migrations:tc + */ + +/** @type {import('jest').Config} */ +module.exports = { + ...require('./jest.config.migration.js'), + globalSetup: '<rootDir>/test/setup-testcontainers.js', + globalTeardown: '<rootDir>/test/teardown-testcontainers.js', + // Longer timeout for container startup + testTimeout: 30_000, + // Disable caching - testcontainers' signal-exit conflicts with Jest's + // transform cache (write-file-atomic). Performance impact is minimal + // since integration tests are I/O-bound, not transform-bound.
+ cache: false, + forceExit: true, +}; diff --git a/packages/cli/package.json b/packages/cli/package.json index 8b276a1e5b5..1dbfcc4ddb3 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -25,8 +25,13 @@ "test:integration": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js", "test:dev": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --watch", "test:sqlite": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js --no-coverage", + "test:sqlite:migrations": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.migration.js --no-coverage", + "test:sqlite:migrations:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.migration.testcontainers.js --no-coverage", "test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.integration.js --no-coverage", + "test:postgres:migrations": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.migration.js --no-coverage", + "test:postgres:migrations:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.migration.testcontainers.js --no-coverage", "test:postgres:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage", + "test:postgres:integration:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage --testPathIgnorePatterns=/test/migration/", "test:mariadb": "echo true", "test:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest", "test:dev:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest --watch", diff --git a/packages/cli/test/migration/1760020838000-unique-role-names.test.ts b/packages/cli/test/migration/1760020838000-unique-role-names.test.ts index f2b93f8126e..520bb0f50bb 100644 --- a/packages/cli/test/migration/1760020838000-unique-role-names.test.ts +++ b/packages/cli/test/migration/1760020838000-unique-role-names.test.ts @@ -36,6 +36,11 @@ describe('UniqueRoleNames Migration', () => { dataSource = Container.get(DataSource); + // Clear database to start with clean slate + const context = createTestMigrationContext(dataSource); + await context.queryRunner.clearDatabase(); + await context.queryRunner.release(); + // Run migrations up to (but not including) target migration await initDbUpToMigration(MIGRATION_NAME); }); @@ -62,17 +67,17 @@ describe('UniqueRoleNames Migration', () => { const roleType = roleData.roleType ?? 'project'; const description = roleData.description ?? 
null; - await context.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}, ${descriptionColumn}) VALUES (?, ?, ?, ?, ?, ?, ?)`, - [ - roleData.slug, - roleData.displayName, - roleData.createdAt, - roleData.createdAt, + await context.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}, ${descriptionColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType, :description)`, + { + slug: roleData.slug, + displayName: roleData.displayName, + createdAt: roleData.createdAt, + updatedAt: roleData.createdAt, systemRole, roleType, description, - ], + }, ); } @@ -85,8 +90,9 @@ describe('UniqueRoleNames Migration', () => { const displayNameColumn = context.escape.columnName('displayName'); const createdAtColumn = context.escape.columnName('createdAt'); - const roles = await context.queryRunner.query( - `SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName, ${createdAtColumn} as createdAt FROM ${tableName} ORDER BY ${createdAtColumn} ASC`, + const roles = await context.runQuery( + `SELECT ${slugColumn}, ${displayNameColumn}, ${createdAtColumn} FROM ${tableName} ORDER BY ${createdAtColumn} ASC`, + {}, ); return roles; @@ -261,26 +267,41 @@ describe('UniqueRoleNames Migration', () => { ); expect(uniqueIndex).toBeDefined(); } else if (postMigrationContext.isPostgres) { - const result = await postMigrationContext.queryRunner.query( - `SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, + // For PostgreSQL, we need the actual table/index names without quotes + // The escaped indexName has quotes, so we need to strip them + const actualTableName = postMigrationContext.tablePrefix + 'role'; + const actualIndexName = indexName.replace(/"/g, ''); // Remove quotes from escaped name + const result = await postMigrationContext.runQuery( + 'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName', + { tableName: actualTableName, indexName: actualIndexName }, ); expect(result).toHaveLength(1); // Verify index is unique - const uniqueCheck = await postMigrationContext.queryRunner.query( + const uniqueCheck = await postMigrationContext.runQuery< + Array<{ index_name: string; indisunique: boolean }> + >( `SELECT i.relname as index_name, ix.indisunique FROM pg_class t JOIN pg_index ix ON t.oid = ix.indrelid JOIN pg_class i ON i.oid = ix.indexrelid - WHERE t.relname = ${tableName} AND i.relname = ${indexName}`, + WHERE t.relname = :tableName AND i.relname = :indexName`, + { tableName: actualTableName, indexName: actualIndexName }, ); expect(uniqueCheck[0].indisunique).toBe(true); } // Verify index enforces uniqueness by attempting duplicate insert - await postMigrationContext.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${postMigrationContext.escape.columnName('createdAt')}, ${postMigrationContext.escape.columnName('updatedAt')}, ${postMigrationContext.escape.columnName('systemRole')}, ${postMigrationContext.escape.columnName('roleType')}) VALUES (?, ?, ?, ?, ?, ?)`, - ['test-duplicate-attempt', 'Unique Test Name', new Date(), new Date(), false, 'project'], + await postMigrationContext.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${postMigrationContext.escape.columnName('createdAt')}, 
${postMigrationContext.escape.columnName('updatedAt')}, ${postMigrationContext.escape.columnName('systemRole')}, ${postMigrationContext.escape.columnName('roleType')}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`, + { + slug: 'test-duplicate-attempt', + displayName: 'Unique Test Name', + createdAt: new Date(), + updatedAt: new Date(), + systemRole: false, + roleType: 'project', + }, ); const attemptDuplicateInsert = async () => { @@ -326,8 +347,13 @@ describe('UniqueRoleNames Migration', () => { ); expect(uniqueIndex).toBeDefined(); } else if (upContext.isPostgres) { - const result = await upContext.queryRunner.query( - `SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, + // For PostgreSQL, we need the actual table/index names without quotes + // The escaped indexName has quotes, so we need to strip them + const actualTableName = upContext.tablePrefix + 'role'; + const actualIndexName = indexName.replace(/"/g, ''); // Remove quotes from escaped name + const result = await upContext.runQuery( + 'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName', + { tableName: actualTableName, indexName: actualIndexName }, ); expect(result).toHaveLength(1); } @@ -341,7 +367,7 @@ describe('UniqueRoleNames Migration', () => { // Verify index is removed (DB-specific queries) if (postRollbackContext.isSqlite) { - const indexes = await postRollbackContext.queryRunner.query( + const indexes = await postRollbackContext.runQuery<Array<{ name: string; unique: number }>>( `PRAGMA index_list(${tableName})`, ); const uniqueIndex = indexes.find( ); expect(uniqueIndex).toBeUndefined(); } else if (postRollbackContext.isPostgres) { - const result = await postRollbackContext.queryRunner.query( - `SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, + const result = await postRollbackContext.runQuery( + 'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName', + { + tableName: postRollbackContext.tablePrefix + 'role', + indexName: indexName.replace(/"/g, ''), + }, ); expect(result).toHaveLength(0); } @@ -365,20 +395,34 @@ describe('UniqueRoleNames Migration', () => { const systemRoleColumn = postRollbackContext.escape.columnName('systemRole'); const roleTypeColumn = postRollbackContext.escape.columnName('roleType'); - await postRollbackContext.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, - ['rollback-test-1', 'Duplicate After Rollback', new Date(), new Date(), false, 'project'], + await postRollbackContext.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`, + { + slug: 'rollback-test-1', + displayName: 'Duplicate After Rollback', + createdAt: new Date(), + updatedAt: new Date(), + systemRole: false, + roleType: 'project', + }, ); - await postRollbackContext.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, - ['rollback-test-2', 'Duplicate After Rollback', new Date(), new Date(), false, 'project'], + await postRollbackContext.runQuery( +
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`, + { + slug: 'rollback-test-2', + displayName: 'Duplicate After Rollback', + createdAt: new Date(), + updatedAt: new Date(), + systemRole: false, + roleType: 'project', + }, ); // Verify both roles were inserted successfully - const duplicateRoles = await postRollbackContext.queryRunner.query( - `SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${displayNameColumn} = ?`, - ['Duplicate After Rollback'], + const duplicateRoles = await postRollbackContext.runQuery( + `SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${displayNameColumn} = :displayName`, + { displayName: 'Duplicate After Rollback' }, ); expect(duplicateRoles).toHaveLength(2); @@ -401,19 +445,28 @@ describe('UniqueRoleNames Migration', () => { const roleTypeColumn = context.escape.columnName('roleType'); // Insert role with unique displayName - await context.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, - ['unique-role-test', 'Unique Role Name', new Date(), new Date(), false, 'project'], + await context.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`, + { + slug: 'unique-role-test', + displayName: 'Unique Role Name', + createdAt: new Date(), + updatedAt: new Date(), + systemRole: false, + roleType: 'project', + }, ); // Verify retrieval using SQL - const [result] = await context.queryRunner.query( - `SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${slugColumn} = ?`, - ['unique-role-test'], + const results = await context.runQuery<Array<{ slug: string }>>( + `SELECT ${slugColumn}, ${displayNameColumn} FROM ${tableName} WHERE ${slugColumn} = :slug`, + { slug: 'unique-role-test' }, ); - expect(result).toBeDefined(); - expect(result.displayName).toBe('Unique Role Name'); + expect(results).toHaveLength(1); + // Access using the actual database column name + const row = results[0] as Record<string, unknown>; + expect(row.displayName ?? 
row.displayname).toBe('Unique Role Name'); // Cleanup await context.queryRunner.release(); diff --git a/packages/cli/test/migration/1763048000000-activate-execute-workflow-trigger-workflows.test.ts b/packages/cli/test/migration/1763048000000-activate-execute-workflow-trigger-workflows.test.ts index 685936b1b54..93bb6f406ae 100644 --- a/packages/cli/test/migration/1763048000000-activate-execute-workflow-trigger-workflows.test.ts +++ b/packages/cli/test/migration/1763048000000-activate-execute-workflow-trigger-workflows.test.ts @@ -4,6 +4,7 @@ import { runSingleMigration, type TestMigrationContext, } from '@n8n/backend-test-utils'; +import { GlobalConfig } from '@n8n/config'; import { DbConnection } from '@n8n/db'; import { Container } from '@n8n/di'; import { DataSource } from '@n8n/typeorm'; @@ -40,6 +41,11 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { dataSource = Container.get(DataSource); + // Clear database to start with clean slate + const context = createTestMigrationContext(dataSource); + await context.queryRunner.clearDatabase(); + await context.queryRunner.release(); + await initDbUpToMigration(MIGRATION_NAME); }); @@ -53,7 +59,9 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { workflowData: WorkflowData, ): Promise { const tableName = context.escape.tableName('workflow_entity'); + const historyTableName = context.escape.tableName('workflow_history'); const idColumn = context.escape.columnName('id'); + const workflowIdColumn = context.escape.columnName('workflowId'); const nameColumn = context.escape.columnName('name'); const nodesColumn = context.escape.columnName('nodes'); const connectionsColumn = context.escape.columnName('connections'); @@ -62,18 +70,34 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { const createdAtColumn = context.escape.columnName('createdAt'); const updatedAtColumn = context.escape.columnName('updatedAt'); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, - [ - workflowData.id, - workflowData.name, - JSON.stringify(workflowData.nodes), - JSON.stringify(workflowData.connections), - workflowData.active, - workflowData.versionId, - workflowData.createdAt, - workflowData.updatedAt, - ], + // Insert workflow_entity record first (workflow_history references it) + await context.runQuery( + `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:id, :name, :nodes, :connections, :active, :versionId, :createdAt, :updatedAt)`, + { + id: workflowData.id, + name: workflowData.name, + nodes: JSON.stringify(workflowData.nodes), + connections: JSON.stringify(workflowData.connections), + active: workflowData.active, + versionId: workflowData.versionId, + createdAt: workflowData.createdAt, + updatedAt: workflowData.updatedAt, + }, + ); + + // Insert workflow_history record (required for activeVersionId foreign key) + const authorsColumn = context.escape.columnName('authors'); + await context.runQuery( + `INSERT INTO ${historyTableName} (${versionIdColumn}, ${workflowIdColumn}, ${nodesColumn}, ${connectionsColumn}, ${authorsColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:versionId, :workflowId, :nodes, :connections, :authors, :createdAt, :updatedAt)`, + { + versionId: 
workflowData.versionId, + workflowId: workflowData.id, + nodes: JSON.stringify(workflowData.nodes), + connections: JSON.stringify(workflowData.connections), + authors: 'test-user', + createdAt: workflowData.createdAt, + updatedAt: workflowData.updatedAt, + }, ); } @@ -89,12 +113,24 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { const versionIdColumn = context.escape.columnName('versionId'); const activeVersionIdColumn = context.escape.columnName('activeVersionId'); - const [workflow] = await context.queryRunner.query( - `SELECT ${idColumn} as id, ${nameColumn} as name, ${activeColumn} as active, ${nodesColumn} as nodes, ${versionIdColumn} as versionId, ${activeVersionIdColumn} as activeVersionId FROM ${tableName} WHERE ${idColumn} = ?`, - [id], + // For PostgreSQL, cast JSON column to text to get string representation + const nodesSelect = context.isPostgres ? `${nodesColumn}::text` : nodesColumn; + + const workflows = await context.runQuery< + Array<{ + id: string; + name: string; + active: boolean; + nodes: string; + versionId: string; + activeVersionId: string | null; + }> + >( + `SELECT ${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesSelect}, ${versionIdColumn}, ${activeVersionIdColumn} FROM ${tableName} WHERE ${idColumn} = :id`, + { id }, ); - return workflow; + return workflows[0] as WorkflowRow | undefined; } describe('Up Migration', () => { @@ -396,30 +432,35 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { }); // Insert workflow with invalid JSON containing unescaped control characters - const tableName = context.escape.tableName('workflow_entity'); - const idColumn = context.escape.columnName('id'); - const nameColumn = context.escape.columnName('name'); - const nodesColumn = context.escape.columnName('nodes'); - const connectionsColumn = context.escape.columnName('connections'); - const activeColumn = context.escape.columnName('active'); - const versionIdColumn = context.escape.columnName('versionId'); - const createdAtColumn = context.escape.columnName('createdAt'); - const updatedAtColumn = context.escape.columnName('updatedAt'); + // Note: PostgreSQL enforces strict JSON validation and won't allow invalid JSON, + // so we skip this test data for PostgreSQL + if (!context.isPostgres) { + const tableName = context.escape.tableName('workflow_entity'); + const idColumn = context.escape.columnName('id'); + const nameColumn = context.escape.columnName('name'); + const nodesColumn = context.escape.columnName('nodes'); + const connectionsColumn = context.escape.columnName('connections'); + const activeColumn = context.escape.columnName('active'); + const versionIdColumn = context.escape.columnName('versionId'); + const createdAtColumn = context.escape.columnName('createdAt'); + const updatedAtColumn = context.escape.columnName('updatedAt'); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, - [ - workflowIds.invalidJson, - 'Test Invalid JSON with Control Characters', - // Invalid JSON with unescaped newline (simulating the production issue) - '[{"id":"test","type":"n8n-nodes-base.executeWorkflowTrigger","parameters":{"inputSource":"passthrough","description":"MEASUREMENT DETAILS: \\"N/A\\"\\nMEASUREMENT TYPE: \\"N/A\\"\n"},"typeVersion":1,"position":[0,0]}]', - '{}', - false, - randomUUID(), - new Date(), - new Date(), - ], - ); + await 
context.runQuery( + `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:id, :name, :nodes, :connections, :active, :versionId, :createdAt, :updatedAt)`, + { + id: workflowIds.invalidJson, + name: 'Test Invalid JSON with Control Characters', + // Invalid JSON with unescaped newline (simulating the production issue) + nodes: + '[{"id":"test","type":"n8n-nodes-base.executeWorkflowTrigger","parameters":{"inputSource":"passthrough","description":"MEASUREMENT DETAILS: \\"N/A\\"\\nMEASUREMENT TYPE: \\"N/A\\"\n"},"typeVersion":1,"position":[0,0]}]', + connections: '{}', + active: false, + versionId: randomUUID(), + createdAt: new Date(), + updatedAt: new Date(), + }, + ); + } await runSingleMigration(MIGRATION_NAME); await context.queryRunner.release(); @@ -554,18 +595,25 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => { await context.queryRunner.release(); }); - it('should skip workflows with invalid JSON containing unescaped control characters', async () => { - const context = createTestMigrationContext(dataSource); - const workflow = await getWorkflowById(context, workflowIds.invalidJson); + // PostgreSQL enforces strict JSON validation and won't allow invalid JSON to be inserted, + // so we skip this test for PostgreSQL + // eslint-disable-next-line n8n-local-rules/no-skipped-tests + const testFn = Container.get(GlobalConfig).database.type === 'postgresdb' ? it.skip : it; + testFn( + 'should skip workflows with invalid JSON containing unescaped control characters', + async () => { + const context = createTestMigrationContext(dataSource); + const workflow = await getWorkflowById(context, workflowIds.invalidJson); - // Workflow should remain inactive and unchanged - expect(workflow?.active).toBeFalsy(); - expect(workflow?.activeVersionId).toBeNull(); + // Workflow should remain inactive and unchanged + expect(workflow?.active).toBeFalsy(); + expect(workflow?.activeVersionId).toBeNull(); - // Verify the invalid JSON is still in the database (unchanged) - expect(workflow?.nodes).toContain('MEASUREMENT DETAILS'); + // Verify the invalid JSON is still in the database (unchanged) + expect(workflow?.nodes).toContain('MEASUREMENT DETAILS'); - await context.queryRunner.release(); - }); + await context.queryRunner.release(); + }, + ); }); }); diff --git a/packages/cli/test/migration/1766064542000-add-workflow-publish-scope-to-project-roles.test.ts b/packages/cli/test/migration/1766064542000-add-workflow-publish-scope-to-project-roles.test.ts index b273cb86294..035a1c10196 100644 --- a/packages/cli/test/migration/1766064542000-add-workflow-publish-scope-to-project-roles.test.ts +++ b/packages/cli/test/migration/1766064542000-add-workflow-publish-scope-to-project-roles.test.ts @@ -43,6 +43,11 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { dataSource = Container.get(DataSource); + // Clear database to start with clean slate + const context = createTestMigrationContext(dataSource); + await context.queryRunner.clearDatabase(); + await context.queryRunner.release(); + await initDbUpToMigration(MIGRATION_NAME); }); @@ -63,15 +68,19 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { const displayNameColumn = context.escape.columnName('displayName'); const descriptionColumn = context.escape.columnName('description'); - const existingScope = await context.queryRunner.query( - `SELECT ${slugColumn} FROM ${tableName} WHERE 
${slugColumn} = ?`, - [scopeData.slug], + const existingScope = await context.runQuery( + `SELECT ${slugColumn} FROM ${tableName} WHERE ${slugColumn} = :slug`, + { slug: scopeData.slug }, ); if (existingScope.length === 0) { - await context.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${descriptionColumn}) VALUES (?, ?, ?)`, - [scopeData.slug, scopeData.displayName, scopeData.description], + await context.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${descriptionColumn}) VALUES (:slug, :displayName, :description)`, + { + slug: scopeData.slug, + displayName: scopeData.displayName, + description: scopeData.description, + }, ); } } @@ -90,9 +99,16 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { const systemRole = roleData.systemRole ?? false; - await context.queryRunner.query( - `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${roleTypeColumn}, ${systemRoleColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?)`, - [roleData.slug, roleData.displayName, roleData.roleType, systemRole, new Date(), new Date()], + await context.runQuery( + `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${roleTypeColumn}, ${systemRoleColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:slug, :displayName, :roleType, :systemRole, :createdAt, :updatedAt)`, + { + slug: roleData.slug, + displayName: roleData.displayName, + roleType: roleData.roleType, + systemRole, + createdAt: new Date(), + updatedAt: new Date(), + }, ); } @@ -107,9 +123,9 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { const roleSlugColumn = context.escape.columnName('roleSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug'); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${roleSlugColumn}, ${scopeSlugColumn}) VALUES (?, ?)`, - [roleScopeData.roleSlug, roleScopeData.scopeSlug], + await context.runQuery( + `INSERT INTO ${tableName} (${roleSlugColumn}, ${scopeSlugColumn}) VALUES (:roleSlug, :scopeSlug)`, + { roleSlug: roleScopeData.roleSlug, scopeSlug: roleScopeData.scopeSlug }, ); } @@ -124,12 +140,12 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { const roleSlugColumn = context.escape.columnName('roleSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug'); - const scopes = await context.queryRunner.query( - `SELECT ${roleSlugColumn} as roleSlug, ${scopeSlugColumn} as scopeSlug FROM ${tableName} WHERE ${roleSlugColumn} = ?`, - [roleSlug], + const scopes = await context.runQuery>( + `SELECT ${roleSlugColumn}, ${scopeSlugColumn} FROM ${tableName} WHERE ${roleSlugColumn} = :roleSlug`, + { roleSlug }, ); - return scopes; + return scopes as RoleScopeRow[]; } /** @@ -143,12 +159,12 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => { const roleSlugColumn = context.escape.columnName('roleSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug'); - const roleScopeEntries = await context.queryRunner.query( - `SELECT ${roleSlugColumn} as roleSlug, ${scopeSlugColumn} as scopeSlug FROM ${tableName} WHERE ${scopeSlugColumn} = ?`, - [scopeSlug], + const roleScopeEntries = await context.runQuery>( + `SELECT ${roleSlugColumn}, ${scopeSlugColumn} FROM ${tableName} WHERE ${scopeSlugColumn} = :scopeSlug`, + { scopeSlug }, ); - return roleScopeEntries; + return roleScopeEntries as RoleScopeRow[]; } describe('up migration', () => { diff --git 
a/packages/cli/test/migration/1767018516000-change-workflow-statistics-fk-to-no-action.test.ts b/packages/cli/test/migration/1767018516000-change-workflow-statistics-fk-to-no-action.test.ts index f47e53acaeb..d67c1faa97f 100644 --- a/packages/cli/test/migration/1767018516000-change-workflow-statistics-fk-to-no-action.test.ts +++ b/packages/cli/test/migration/1767018516000-change-workflow-statistics-fk-to-no-action.test.ts @@ -34,11 +34,19 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { let dataSource: DataSource; beforeAll(async () => { - // Initialize DB connection without running migrations + // Initialize DB connection const dbConnection = Container.get(DbConnection); await dbConnection.init(); dataSource = Container.get(DataSource); + }); + + beforeEach(async () => { + // Clear database before each test to ensure isolation + // Note: Migration tests must run sequentially (maxWorkers: 1) to avoid conflicts + const context = createTestMigrationContext(dataSource); + await context.queryRunner.clearDatabase(); + await context.queryRunner.release(); // Run migrations up to (but not including) target migration await initDbUpToMigration(MIGRATION_NAME); @@ -69,10 +77,19 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const versionIdColumn = context.escape.columnName('versionId'); const versionId = nanoid(); - const placeholders = getParamPlaceholders(context, 9); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesColumn}, ${connectionsColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${triggerCountColumn}, ${versionIdColumn}) VALUES (${placeholders})`, - [workflowId, workflowName, false, '[]', '{}', new Date(), new Date(), 0, versionId], + await context.runQuery( + `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesColumn}, ${connectionsColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${triggerCountColumn}, ${versionIdColumn}) VALUES (:id, :name, :active, :nodes, :connections, :createdAt, :updatedAt, :triggerCount, :versionId)`, + { + id: workflowId, + name: workflowName, + active: false, + nodes: '[]', + connections: '{}', + createdAt: new Date(), + updatedAt: new Date(), + triggerCount: 0, + versionId, + }, ); } @@ -92,10 +109,9 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const latestEventColumn = context.escape.columnName('latestEvent'); const rootCountColumn = context.escape.columnName('rootCount'); - const placeholders = getParamPlaceholders(context, 5); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}) VALUES (${placeholders})`, - [workflowId, name, count, new Date(), 0], + await context.runQuery( + `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}) VALUES (:workflowId, :name, :count, :latestEvent, :rootCount)`, + { workflowId, name, count, latestEvent: new Date(), rootCount: 0 }, ); } @@ -117,10 +133,9 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const rootCountColumn = context.escape.columnName('rootCount'); const workflowNameColumn = context.escape.columnName('workflowName'); - const placeholders = getParamPlaceholders(context, 6); - await context.queryRunner.query( - `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}, ${workflowNameColumn}) VALUES 
(${placeholders})`, - [workflowId, name, count, new Date(), 0, workflowName], + await context.runQuery( + `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}, ${workflowNameColumn}) VALUES (:workflowId, :name, :count, :latestEvent, :rootCount, :workflowName)`, + { workflowId, name, count, latestEvent: new Date(), rootCount: 0, workflowName }, ); } @@ -138,10 +153,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const nameColumn = context.escape.columnName('name'); const countColumn = context.escape.columnName('count'); - const placeholder = getParamPlaceholder(context); - return await context.queryRunner.query( - `SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count" FROM ${tableName} WHERE ${workflowIdColumn} = ${placeholder}`, - [workflowId], + // Cast count to integer for consistent return type + const countSelect = context.isPostgres ? `${countColumn}::int` : countColumn; + + return await context.runQuery( + `SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn} FROM ${tableName} WHERE ${workflowIdColumn} = :workflowId`, + { workflowId }, ); } @@ -160,10 +177,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const countColumn = context.escape.columnName('count'); const workflowNameColumn = context.escape.columnName('workflowName'); - const placeholder = getParamPlaceholder(context); - return await context.queryRunner.query( - `SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count", ${workflowNameColumn} as "workflowName" FROM ${tableName} WHERE ${workflowIdColumn} = ${placeholder}`, - [workflowId], + // Cast count to integer for consistent return type + const countSelect = context.isPostgres ? `${countColumn}::int` : countColumn; + + return await context.runQuery( + `SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn}, ${workflowNameColumn} FROM ${tableName} WHERE ${workflowIdColumn} = :workflowId`, + { workflowId }, ); } @@ -182,10 +201,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { const countColumn = context.escape.columnName('count'); const workflowNameColumn = context.escape.columnName('workflowName'); - const placeholder = getParamPlaceholder(context); - return await context.queryRunner.query( - `SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count", ${workflowNameColumn} as "workflowName" FROM ${tableName} WHERE ${nameColumn} = ${placeholder}`, - [name], + // Cast count to integer for consistent return type + const countSelect = context.isPostgres ? 
`${countColumn}::int` : countColumn; + + return await context.runQuery( + `SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn}, ${workflowNameColumn} FROM ${tableName} WHERE ${nameColumn} = :name`, + { name }, ); } @@ -421,9 +442,7 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => { }); it('should delete orphaned statistics during rollback before restoring FK constraint', async () => { - // The database is already in post-migration state from the previous test - // We need to rollback first, then run the migration again - await undoLastSingleMigration(); + // Run the migration first (beforeEach only runs up to, not including, the target migration) await runSingleMigration(MIGRATION_NAME); // Create context AFTER migration runs diff --git a/packages/cli/test/migration/migration-test-helpers.test.ts b/packages/cli/test/migration/migration-test-helpers.test.ts index c7468f0d602..3458c7e2f42 100644 --- a/packages/cli/test/migration/migration-test-helpers.test.ts +++ b/packages/cli/test/migration/migration-test-helpers.test.ts @@ -1,4 +1,5 @@ import { initDbUpToMigration, runSingleMigration } from '@n8n/backend-test-utils'; +import { GlobalConfig } from '@n8n/config'; import { DbConnection } from '@n8n/db'; import { Container } from '@n8n/di'; import { DataSource } from '@n8n/typeorm'; @@ -7,6 +8,21 @@ import { UnexpectedError } from 'n8n-workflow'; describe('Migration Test Helpers', () => { let dataSource: DataSource; + /** + * Get the properly qualified migrations table name for the current database + */ + function getMigrationsTableName(): string { + const globalConfig = Container.get(GlobalConfig); + const dbType = globalConfig.database.type; + const tablePrefix = globalConfig.database.tablePrefix; + + if (dbType === 'postgresdb') { + const schema = globalConfig.database.postgresdb.schema; + return `${schema}."${tablePrefix}migrations"`; + } + return `"${tablePrefix}migrations"`; + } + beforeEach(async () => { // Initialize connection without running migrations const dbConnection = Container.get(DbConnection); @@ -15,6 +31,16 @@ describe('Migration Test Helpers', () => { }); afterEach(async () => { + // Clean up the migrations table + const globalConfig = Container.get(GlobalConfig); + if (globalConfig.database.type === 'postgresdb') { + try { + await dataSource.query(`TRUNCATE ${getMigrationsTableName()} CASCADE`); + } catch { + // Ignore errors if table doesn't exist + } + } + const dbConnection = Container.get(DbConnection); await dbConnection.close(); }); @@ -36,7 +62,9 @@ describe('Migration Test Helpers', () => { console.log('Migrations executed up to ' + secondMigrationName); // Verify only first migration was executed - const executed = await dataSource.query('SELECT * FROM migrations ORDER BY timestamp'); + const executed = await dataSource.query( + `SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp`, + ); expect(executed).toHaveLength(1); expect(executed[0].name).toBe(migrations[0].name); }); @@ -60,7 +88,9 @@ describe('Migration Test Helpers', () => { await runSingleMigration(secondMigrationName); - const executed = await dataSource.query('SELECT * FROM migrations ORDER BY timestamp'); + const executed = await dataSource.query( + `SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp`, + ); expect(executed).toHaveLength(2); expect(executed[0].name).toBe(migrations[0].name); expect(executed[1].name).toBe(secondMigrationName);
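
Note on usage: taken together, the changes above define a repeatable pattern for per-migration tests. The sketch below illustrates that pattern and is not part of the diff — the migration name Example1700000000000 and the example table are hypothetical placeholders, while initDbUpToMigration, runSingleMigration, undoLastSingleMigration, createTestMigrationContext, and the named-parameter runQuery are the helpers introduced or extended here.

import {
  createTestMigrationContext,
  initDbUpToMigration,
  runSingleMigration,
  undoLastSingleMigration,
} from '@n8n/backend-test-utils';
import { DbConnection } from '@n8n/db';
import { Container } from '@n8n/di';
import { DataSource } from '@n8n/typeorm';

// Hypothetical target migration; a real test uses a name from the migrations list.
const MIGRATION_NAME = 'Example1700000000000';

describe('Example Migration', () => {
  let dataSource: DataSource;

  beforeEach(async () => {
    await Container.get(DbConnection).init();
    dataSource = Container.get(DataSource);

    // Start from a clean schema, then migrate up to (but not including) the target
    const context = createTestMigrationContext(dataSource);
    await context.queryRunner.clearDatabase();
    await context.queryRunner.release();
    await initDbUpToMigration(MIGRATION_NAME);
  });

  it('migrates seeded rows and can roll back', async () => {
    const context = createTestMigrationContext(dataSource);
    const tableName = context.escape.tableName('example'); // hypothetical table
    const nameColumn = context.escape.columnName('name');

    // Named parameters are portable across SQLite, MySQL, and Postgres
    await context.runQuery(`INSERT INTO ${tableName} (${nameColumn}) VALUES (:name)`, {
      name: 'seeded-before-migration',
    });

    await runSingleMigration(MIGRATION_NAME);

    const rows = await context.runQuery<Array<Record<string, unknown>>>(
      `SELECT * FROM ${tableName}`,
    );
    expect(rows).toHaveLength(1);

    // Optionally exercise the down migration as well
    await undoLastSingleMigration();
    await context.queryRunner.release();
  });
});

Run such tests with pnpm test:sqlite:migrations or pnpm test:postgres:migrations:tc; both use the migration Jest configs above, which set maxWorkers: 1 so tests do not contend for the shared schema.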