chore(core): Run migration tests on PRs (#25181)

This commit is contained in:
Guillaume Jacquart 2026-02-04 14:29:54 +01:00 committed by GitHub
parent f6db13a858
commit a7ebe7746e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 401 additions and 144 deletions

View File

@@ -11,6 +11,7 @@ on:
- packages/cli/src/modules/**/*.entity.ts - packages/cli/src/modules/**/*.entity.ts
- packages/cli/src/modules/**/*.repository.ts - packages/cli/src/modules/**/*.repository.ts
- packages/cli/test/integration/** - packages/cli/test/integration/**
- packages/cli/test/migration/**
- packages/cli/test/shared/db/** - packages/cli/test/shared/db/**
- packages/@n8n/db/** - packages/@n8n/db/**
- packages/cli/**/__tests__/** - packages/cli/**/__tests__/**
@@ -53,6 +54,10 @@ jobs:
working-directory: packages/cli working-directory: packages/cli
run: pnpm test:sqlite run: pnpm test:sqlite
- name: Test SQLite Pooled Migrations
working-directory: packages/cli
run: pnpm test:sqlite:migrations:tc
postgres: postgres:
name: Postgres name: Postgres
needs: build needs: build
@@ -69,9 +74,13 @@ jobs:
- name: Pre-pull Test Container Images - name: Pre-pull Test Container Images
run: npx tsx packages/testing/containers/pull-test-images.ts || true run: npx tsx packages/testing/containers/pull-test-images.ts || true
- name: Test Postgres - name: Test Postgres Integration
working-directory: packages/cli working-directory: packages/cli
run: pnpm test:postgres:tc run: pnpm test:postgres:integration:tc
- name: Test Postgres Migrations
working-directory: packages/cli
run: pnpm test:postgres:migrations:tc
notify-on-failure: notify-on-failure:
name: Notify Slack on failure name: Notify Slack on failure

View File

@@ -1,7 +1,7 @@
import { GlobalConfig } from '@n8n/config'; import { GlobalConfig } from '@n8n/config';
import { type DatabaseType, DbConnection, type Migration } from '@n8n/db'; import { type DatabaseType, DbConnection, type Migration } from '@n8n/db';
import { Container } from '@n8n/di'; import { Container } from '@n8n/di';
import { DataSource, type QueryRunner } from '@n8n/typeorm'; import { DataSource, type ObjectLiteral, type QueryRunner } from '@n8n/typeorm';
import { UnexpectedError } from 'n8n-workflow'; import { UnexpectedError } from 'n8n-workflow';
async function reinitializeDataConnection(): Promise<void> { async function reinitializeDataConnection(): Promise<void> {
@@ -10,6 +10,21 @@ async function reinitializeDataConnection(): Promise<void> {
await dbConnection.init(); await dbConnection.init();
} }
/**
* Get the properly qualified migrations table name for the current database.
*/
function getMigrationsTableName(): string {
const globalConfig = Container.get(GlobalConfig);
const dbType = globalConfig.database.type;
const tablePrefix = globalConfig.database.tablePrefix;
if (dbType === 'postgresdb') {
const schema = globalConfig.database.postgresdb.schema;
return `${schema}."${tablePrefix}migrations"`;
}
return `"${tablePrefix}migrations"`;
}
/** /**
* Test migration context with database-specific helpers (similar to MigrationContext). * Test migration context with database-specific helpers (similar to MigrationContext).
*/ */
@@ -24,6 +39,7 @@ export interface TestMigrationContext {
tableName(name: string): string; tableName(name: string): string;
indexName(name: string): string; indexName(name: string): string;
}; };
runQuery: <T = unknown>(sql: string, namedParameters?: ObjectLiteral) => Promise<T>;
} }
/** /**
@@ -47,6 +63,32 @@ export function createTestMigrationContext(dataSource: DataSource): TestMigratio
tableName: (name) => queryRunner.connection.driver.escape(`${tablePrefix}${name}`), tableName: (name) => queryRunner.connection.driver.escape(`${tablePrefix}${name}`),
indexName: (name) => queryRunner.connection.driver.escape(`IDX_${tablePrefix}${name}`), indexName: (name) => queryRunner.connection.driver.escape(`IDX_${tablePrefix}${name}`),
}, },
runQuery: async <T>(sql: string, namedParameters?: ObjectLiteral) => {
if (namedParameters) {
if (dbType === 'postgresdb') {
// For PostgreSQL, convert named parameters to positional ($1, $2, etc.)
// This handles JSON columns properly which don't work well with TypeORM's escapeQueryWithParameters
// Use negative lookbehind to avoid matching PostgreSQL's :: cast operator
let paramIndex = 1;
const paramValues: unknown[] = [];
const convertedSql = sql.replace(/(?<!:):(\w+)/g, (_, paramName: string) => {
paramValues.push(namedParameters[paramName] as unknown);
return `$${paramIndex++}`;
});
return (await queryRunner.query(convertedSql, paramValues)) as T;
} else {
// For MySQL/SQLite, use TypeORM's escapeQueryWithParameters
const [query, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
sql,
namedParameters,
{},
);
return (await queryRunner.query(query, parameters)) as T;
}
} else {
return (await queryRunner.query(sql)) as T;
}
},
}; };
} }
@@ -97,7 +139,7 @@ export async function undoLastSingleMigration(): Promise<void> {
// Get the last executed migration from the migrations table // Get the last executed migration from the migrations table
const executedMigrations = await dataSource.query<Array<{ name: string }>>( const executedMigrations = await dataSource.query<Array<{ name: string }>>(
'SELECT * FROM migrations ORDER BY timestamp DESC LIMIT 1', `SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp DESC LIMIT 1`,
); );
if (executedMigrations.length === 0) { if (executedMigrations.length === 0) {

View File

@@ -14,8 +14,12 @@ module.exports = {
coveragePathIgnorePatterns: ['/src/databases/migrations/'], coveragePathIgnorePatterns: ['/src/databases/migrations/'],
testTimeout: 10_000, testTimeout: 10_000,
prettierPath: null, prettierPath: null,
// Run integration tests from both test/integration and src/ directories // Run integration tests from test/integration, test/migration and src/ directories
testRegex: undefined, // Override base config testRegex testRegex: undefined, // Override base config testRegex
testMatch: ['<rootDir>/test/integration/**/*.test.ts', '<rootDir>/src/**/*.integration.test.ts'], testMatch: [
'<rootDir>/test/integration/**/*.test.ts',
'<rootDir>/test/migration/**/*.test.ts',
'<rootDir>/src/**/*.integration.test.ts',
],
testPathIgnorePatterns: ['/dist/', '/node_modules/'], testPathIgnorePatterns: ['/dist/', '/node_modules/'],
}; };

View File

@@ -0,0 +1,8 @@
/** @type {import('jest').Config} */
module.exports = {
...require('./jest.config.integration.js'),
// Override testMatch to only run migration tests
testMatch: ['<rootDir>/test/migration/**/*.test.ts'],
// Run migration tests sequentially to avoid database conflicts
maxWorkers: 1,
};

View File

@@ -0,0 +1,23 @@
/**
* Jest config for migration tests using testcontainers.
* Migration tests must run sequentially (maxWorkers: 1) because they:
* - Share the same database instance
* - Perform rollbacks that affect subsequent tests
* - Clear and reinitialize the database in beforeEach hooks
*
* Usage: pnpm test:postgres:migrations:tc
*/
/** @type {import('jest').Config} */
module.exports = {
...require('./jest.config.migration.js'),
globalSetup: '<rootDir>/test/setup-testcontainers.js',
globalTeardown: '<rootDir>/test/teardown-testcontainers.js',
// Longer timeout for container startup
testTimeout: 30_000,
// Disable caching - testcontainers' signal-exit conflicts with Jest's
// transform cache (write-file-atomic). Performance impact is minimal
// since integration tests are I/O-bound, not transform-bound.
cache: false,
forceExit: true,
};

View File

@@ -25,8 +25,13 @@
"test:integration": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js", "test:integration": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js",
"test:dev": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --watch", "test:dev": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --watch",
"test:sqlite": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js --no-coverage", "test:sqlite": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.integration.js --no-coverage",
"test:sqlite:migrations": "N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.migration.js --no-coverage",
"test:sqlite:migrations:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent DB_SQLITE_POOL_SIZE=4 DB_TYPE=sqlite jest --config=jest.config.migration.testcontainers.js --no-coverage",
"test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.integration.js --no-coverage", "test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.integration.js --no-coverage",
"test:postgres:migrations": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.migration.js --no-coverage",
"test:postgres:migrations:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --config=jest.config.migration.testcontainers.js --no-coverage",
"test:postgres:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage", "test:postgres:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage",
"test:postgres:integration:tc": "TESTCONTAINERS_RYUK_DISABLED=true N8N_LOG_LEVEL=silent jest --config=jest.config.integration.testcontainers.js --no-coverage --testPathIgnorePatterns=/test/migration/",
"test:mariadb": "echo true", "test:mariadb": "echo true",
"test:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest", "test:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest",
"test:dev:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest --watch", "test:dev:win": "set N8N_LOG_LEVEL=silent&& set DB_SQLITE_POOL_SIZE=4&& set DB_TYPE=sqlite&& jest --watch",

View File

@@ -36,6 +36,11 @@ describe('UniqueRoleNames Migration', () => {
dataSource = Container.get(DataSource); dataSource = Container.get(DataSource);
// Clear database to start with clean slate
const context = createTestMigrationContext(dataSource);
await context.queryRunner.clearDatabase();
await context.queryRunner.release();
// Run migrations up to (but not including) target migration // Run migrations up to (but not including) target migration
await initDbUpToMigration(MIGRATION_NAME); await initDbUpToMigration(MIGRATION_NAME);
}); });
@@ -62,17 +67,17 @@ describe('UniqueRoleNames Migration', () => {
const roleType = roleData.roleType ?? 'project'; const roleType = roleData.roleType ?? 'project';
const description = roleData.description ?? null; const description = roleData.description ?? null;
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}, ${descriptionColumn}) VALUES (?, ?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}, ${descriptionColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType, :description)`,
[ {
roleData.slug, slug: roleData.slug,
roleData.displayName, displayName: roleData.displayName,
roleData.createdAt, createdAt: roleData.createdAt,
roleData.createdAt, updatedAt: roleData.createdAt,
systemRole, systemRole,
roleType, roleType,
description, description,
], },
); );
} }
@@ -85,8 +90,9 @@ describe('UniqueRoleNames Migration', () => {
const displayNameColumn = context.escape.columnName('displayName'); const displayNameColumn = context.escape.columnName('displayName');
const createdAtColumn = context.escape.columnName('createdAt'); const createdAtColumn = context.escape.columnName('createdAt');
const roles = await context.queryRunner.query( const roles = await context.runQuery<RoleRow[]>(
`SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName, ${createdAtColumn} as createdAt FROM ${tableName} ORDER BY ${createdAtColumn} ASC`, `SELECT ${slugColumn}, ${displayNameColumn}, ${createdAtColumn} FROM ${tableName} ORDER BY ${createdAtColumn} ASC`,
{},
); );
return roles; return roles;
@@ -261,26 +267,41 @@ describe('UniqueRoleNames Migration', () => {
); );
expect(uniqueIndex).toBeDefined(); expect(uniqueIndex).toBeDefined();
} else if (postMigrationContext.isPostgres) { } else if (postMigrationContext.isPostgres) {
const result = await postMigrationContext.queryRunner.query( // For PostgreSQL, we need the actual table/index names without quotes
`SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, // The escaped indexName has quotes, so we need to strip them
const actualTableName = postMigrationContext.tablePrefix + 'role';
const actualIndexName = indexName.replace(/"/g, ''); // Remove quotes from escaped name
const result = await postMigrationContext.runQuery(
'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName',
{ tableName: actualTableName, indexName: actualIndexName },
); );
expect(result).toHaveLength(1); expect(result).toHaveLength(1);
// Verify index is unique // Verify index is unique
const uniqueCheck = await postMigrationContext.queryRunner.query( const uniqueCheck = await postMigrationContext.runQuery<
Array<{ index_name: string; indisunique: boolean }>
>(
`SELECT i.relname as index_name, ix.indisunique `SELECT i.relname as index_name, ix.indisunique
FROM pg_class t FROM pg_class t
JOIN pg_index ix ON t.oid = ix.indrelid JOIN pg_index ix ON t.oid = ix.indrelid
JOIN pg_class i ON i.oid = ix.indexrelid JOIN pg_class i ON i.oid = ix.indexrelid
WHERE t.relname = ${tableName} AND i.relname = ${indexName}`, WHERE t.relname = :tableName AND i.relname = :indexName`,
{ tableName: actualTableName, indexName: actualIndexName },
); );
expect(uniqueCheck[0].indisunique).toBe(true); expect(uniqueCheck[0].indisunique).toBe(true);
} }
// Verify index enforces uniqueness by attempting duplicate insert // Verify index enforces uniqueness by attempting duplicate insert
await postMigrationContext.queryRunner.query( await postMigrationContext.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${postMigrationContext.escape.columnName('createdAt')}, ${postMigrationContext.escape.columnName('updatedAt')}, ${postMigrationContext.escape.columnName('systemRole')}, ${postMigrationContext.escape.columnName('roleType')}) VALUES (?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${postMigrationContext.escape.columnName('createdAt')}, ${postMigrationContext.escape.columnName('updatedAt')}, ${postMigrationContext.escape.columnName('systemRole')}, ${postMigrationContext.escape.columnName('roleType')}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`,
['test-duplicate-attempt', 'Unique Test Name', new Date(), new Date(), false, 'project'], {
slug: 'test-duplicate-attempt',
displayName: 'Unique Test Name',
createdAt: new Date(),
updatedAt: new Date(),
systemRole: false,
roleType: 'project',
},
); );
const attemptDuplicateInsert = async () => { const attemptDuplicateInsert = async () => {
@@ -326,8 +347,13 @@ describe('UniqueRoleNames Migration', () => {
); );
expect(uniqueIndex).toBeDefined(); expect(uniqueIndex).toBeDefined();
} else if (upContext.isPostgres) { } else if (upContext.isPostgres) {
const result = await upContext.queryRunner.query( // For PostgreSQL, we need the actual table/index names without quotes
`SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, // The escaped indexName has quotes, so we need to strip them
const actualTableName = upContext.tablePrefix + 'role';
const actualIndexName = indexName.replace(/"/g, ''); // Remove quotes from escaped name
const result = await upContext.runQuery(
'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName',
{ tableName: actualTableName, indexName: actualIndexName },
); );
expect(result).toHaveLength(1); expect(result).toHaveLength(1);
} }
@@ -341,7 +367,7 @@ describe('UniqueRoleNames Migration', () => {
// Verify index is removed (DB-specific queries) // Verify index is removed (DB-specific queries)
if (postRollbackContext.isSqlite) { if (postRollbackContext.isSqlite) {
const indexes = await postRollbackContext.queryRunner.query( const indexes = await postRollbackContext.runQuery<Array<{ name: string; unique: number }>>(
`PRAGMA index_list(${tableName})`, `PRAGMA index_list(${tableName})`,
); );
const uniqueIndex = indexes.find( const uniqueIndex = indexes.find(
@@ -350,8 +376,12 @@ describe('UniqueRoleNames Migration', () => {
); );
expect(uniqueIndex).toBeUndefined(); expect(uniqueIndex).toBeUndefined();
} else if (postRollbackContext.isPostgres) { } else if (postRollbackContext.isPostgres) {
const result = await postRollbackContext.queryRunner.query( const result = await postRollbackContext.runQuery(
`SELECT indexname FROM pg_indexes WHERE tablename = ${tableName} AND indexname = ${indexName}`, 'SELECT indexname FROM pg_indexes WHERE tablename = :tableName AND indexname = :indexName',
{
tableName: postRollbackContext.tablePrefix + 'role',
indexName: indexName.replace(/"/g, ''),
},
); );
expect(result).toHaveLength(0); expect(result).toHaveLength(0);
} }
@@ -365,20 +395,34 @@ describe('UniqueRoleNames Migration', () => {
const systemRoleColumn = postRollbackContext.escape.columnName('systemRole'); const systemRoleColumn = postRollbackContext.escape.columnName('systemRole');
const roleTypeColumn = postRollbackContext.escape.columnName('roleType'); const roleTypeColumn = postRollbackContext.escape.columnName('roleType');
await postRollbackContext.queryRunner.query( await postRollbackContext.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`,
['rollback-test-1', 'Duplicate After Rollback', new Date(), new Date(), false, 'project'], {
slug: 'rollback-test-1',
displayName: 'Duplicate After Rollback',
createdAt: new Date(),
updatedAt: new Date(),
systemRole: false,
roleType: 'project',
},
); );
await postRollbackContext.queryRunner.query( await postRollbackContext.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`,
['rollback-test-2', 'Duplicate After Rollback', new Date(), new Date(), false, 'project'], {
slug: 'rollback-test-2',
displayName: 'Duplicate After Rollback',
createdAt: new Date(),
updatedAt: new Date(),
systemRole: false,
roleType: 'project',
},
); );
// Verify both roles were inserted successfully // Verify both roles were inserted successfully
const duplicateRoles = await postRollbackContext.queryRunner.query( const duplicateRoles = await postRollbackContext.runQuery(
`SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${displayNameColumn} = ?`, `SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${displayNameColumn} = :displayName`,
['Duplicate After Rollback'], { displayName: 'Duplicate After Rollback' },
); );
expect(duplicateRoles).toHaveLength(2); expect(duplicateRoles).toHaveLength(2);
@@ -401,19 +445,28 @@ describe('UniqueRoleNames Migration', () => {
const roleTypeColumn = context.escape.columnName('roleType'); const roleTypeColumn = context.escape.columnName('roleType');
// Insert role with unique displayName // Insert role with unique displayName
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${systemRoleColumn}, ${roleTypeColumn}) VALUES (:slug, :displayName, :createdAt, :updatedAt, :systemRole, :roleType)`,
['unique-role-test', 'Unique Role Name', new Date(), new Date(), false, 'project'], {
slug: 'unique-role-test',
displayName: 'Unique Role Name',
createdAt: new Date(),
updatedAt: new Date(),
systemRole: false,
roleType: 'project',
},
); );
// Verify retrieval using SQL // Verify retrieval using SQL
const [result] = await context.queryRunner.query( const results = await context.runQuery<Array<{ slug: string; displayName: string }>>(
`SELECT ${slugColumn} as slug, ${displayNameColumn} as displayName FROM ${tableName} WHERE ${slugColumn} = ?`, `SELECT ${slugColumn}, ${displayNameColumn} FROM ${tableName} WHERE ${slugColumn} = :slug`,
['unique-role-test'], { slug: 'unique-role-test' },
); );
expect(result).toBeDefined(); expect(results).toHaveLength(1);
expect(result.displayName).toBe('Unique Role Name'); // Access using the actual database column name
const row = results[0] as Record<string, unknown>;
expect(row.displayName ?? row.displayname).toBe('Unique Role Name');
// Cleanup // Cleanup
await context.queryRunner.release(); await context.queryRunner.release();

View File

@@ -4,6 +4,7 @@ import {
runSingleMigration, runSingleMigration,
type TestMigrationContext, type TestMigrationContext,
} from '@n8n/backend-test-utils'; } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import { DbConnection } from '@n8n/db'; import { DbConnection } from '@n8n/db';
import { Container } from '@n8n/di'; import { Container } from '@n8n/di';
import { DataSource } from '@n8n/typeorm'; import { DataSource } from '@n8n/typeorm';
@@ -40,6 +41,11 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
dataSource = Container.get(DataSource); dataSource = Container.get(DataSource);
// Clear database to start with clean slate
const context = createTestMigrationContext(dataSource);
await context.queryRunner.clearDatabase();
await context.queryRunner.release();
await initDbUpToMigration(MIGRATION_NAME); await initDbUpToMigration(MIGRATION_NAME);
}); });
@@ -53,7 +59,9 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
workflowData: WorkflowData, workflowData: WorkflowData,
): Promise<void> { ): Promise<void> {
const tableName = context.escape.tableName('workflow_entity'); const tableName = context.escape.tableName('workflow_entity');
const historyTableName = context.escape.tableName('workflow_history');
const idColumn = context.escape.columnName('id'); const idColumn = context.escape.columnName('id');
const workflowIdColumn = context.escape.columnName('workflowId');
const nameColumn = context.escape.columnName('name'); const nameColumn = context.escape.columnName('name');
const nodesColumn = context.escape.columnName('nodes'); const nodesColumn = context.escape.columnName('nodes');
const connectionsColumn = context.escape.columnName('connections'); const connectionsColumn = context.escape.columnName('connections');
@@ -62,18 +70,34 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
const createdAtColumn = context.escape.columnName('createdAt'); const createdAtColumn = context.escape.columnName('createdAt');
const updatedAtColumn = context.escape.columnName('updatedAt'); const updatedAtColumn = context.escape.columnName('updatedAt');
await context.queryRunner.query( // Insert workflow_entity record first (workflow_history references it)
`INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, await context.runQuery(
[ `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:id, :name, :nodes, :connections, :active, :versionId, :createdAt, :updatedAt)`,
workflowData.id, {
workflowData.name, id: workflowData.id,
JSON.stringify(workflowData.nodes), name: workflowData.name,
JSON.stringify(workflowData.connections), nodes: JSON.stringify(workflowData.nodes),
workflowData.active, connections: JSON.stringify(workflowData.connections),
workflowData.versionId, active: workflowData.active,
workflowData.createdAt, versionId: workflowData.versionId,
workflowData.updatedAt, createdAt: workflowData.createdAt,
], updatedAt: workflowData.updatedAt,
},
);
// Insert workflow_history record (required for activeVersionId foreign key)
const authorsColumn = context.escape.columnName('authors');
await context.runQuery(
`INSERT INTO ${historyTableName} (${versionIdColumn}, ${workflowIdColumn}, ${nodesColumn}, ${connectionsColumn}, ${authorsColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:versionId, :workflowId, :nodes, :connections, :authors, :createdAt, :updatedAt)`,
{
versionId: workflowData.versionId,
workflowId: workflowData.id,
nodes: JSON.stringify(workflowData.nodes),
connections: JSON.stringify(workflowData.connections),
authors: 'test-user',
createdAt: workflowData.createdAt,
updatedAt: workflowData.updatedAt,
},
); );
} }
@@ -89,12 +113,24 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
const versionIdColumn = context.escape.columnName('versionId'); const versionIdColumn = context.escape.columnName('versionId');
const activeVersionIdColumn = context.escape.columnName('activeVersionId'); const activeVersionIdColumn = context.escape.columnName('activeVersionId');
const [workflow] = await context.queryRunner.query( // For PostgreSQL, cast JSON column to text to get string representation
`SELECT ${idColumn} as id, ${nameColumn} as name, ${activeColumn} as active, ${nodesColumn} as nodes, ${versionIdColumn} as versionId, ${activeVersionIdColumn} as activeVersionId FROM ${tableName} WHERE ${idColumn} = ?`, const nodesSelect = context.isPostgres ? `${nodesColumn}::text` : nodesColumn;
[id],
const workflows = await context.runQuery<
Array<{
id: string;
name: string;
active: boolean;
nodes: string;
versionId: string;
activeVersionId: string | null;
}>
>(
`SELECT ${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesSelect}, ${versionIdColumn}, ${activeVersionIdColumn} FROM ${tableName} WHERE ${idColumn} = :id`,
{ id },
); );
return workflow; return workflows[0] as WorkflowRow | undefined;
} }
describe('Up Migration', () => { describe('Up Migration', () => {
@@ -396,30 +432,35 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
}); });
// Insert workflow with invalid JSON containing unescaped control characters // Insert workflow with invalid JSON containing unescaped control characters
const tableName = context.escape.tableName('workflow_entity'); // Note: PostgreSQL enforces strict JSON validation and won't allow invalid JSON,
const idColumn = context.escape.columnName('id'); // so we skip this test data for PostgreSQL
const nameColumn = context.escape.columnName('name'); if (!context.isPostgres) {
const nodesColumn = context.escape.columnName('nodes'); const tableName = context.escape.tableName('workflow_entity');
const connectionsColumn = context.escape.columnName('connections'); const idColumn = context.escape.columnName('id');
const activeColumn = context.escape.columnName('active'); const nameColumn = context.escape.columnName('name');
const versionIdColumn = context.escape.columnName('versionId'); const nodesColumn = context.escape.columnName('nodes');
const createdAtColumn = context.escape.columnName('createdAt'); const connectionsColumn = context.escape.columnName('connections');
const updatedAtColumn = context.escape.columnName('updatedAt'); const activeColumn = context.escape.columnName('active');
const versionIdColumn = context.escape.columnName('versionId');
const createdAtColumn = context.escape.columnName('createdAt');
const updatedAtColumn = context.escape.columnName('updatedAt');
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${nodesColumn}, ${connectionsColumn}, ${activeColumn}, ${versionIdColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:id, :name, :nodes, :connections, :active, :versionId, :createdAt, :updatedAt)`,
[ {
workflowIds.invalidJson, id: workflowIds.invalidJson,
'Test Invalid JSON with Control Characters', name: 'Test Invalid JSON with Control Characters',
// Invalid JSON with unescaped newline (simulating the production issue) // Invalid JSON with unescaped newline (simulating the production issue)
'[{"id":"test","type":"n8n-nodes-base.executeWorkflowTrigger","parameters":{"inputSource":"passthrough","description":"MEASUREMENT DETAILS: \\"N/A\\"\\nMEASUREMENT TYPE: \\"N/A\\"\n"},"typeVersion":1,"position":[0,0]}]', nodes:
'{}', '[{"id":"test","type":"n8n-nodes-base.executeWorkflowTrigger","parameters":{"inputSource":"passthrough","description":"MEASUREMENT DETAILS: \\"N/A\\"\\nMEASUREMENT TYPE: \\"N/A\\"\n"},"typeVersion":1,"position":[0,0]}]',
false, connections: '{}',
randomUUID(), active: false,
new Date(), versionId: randomUUID(),
new Date(), createdAt: new Date(),
], updatedAt: new Date(),
); },
);
}
await runSingleMigration(MIGRATION_NAME); await runSingleMigration(MIGRATION_NAME);
await context.queryRunner.release(); await context.queryRunner.release();
@@ -554,18 +595,25 @@ describe('ActivateExecuteWorkflowTriggerWorkflows Migration', () => {
await context.queryRunner.release(); await context.queryRunner.release();
}); });
it('should skip workflows with invalid JSON containing unescaped control characters', async () => { // PostgreSQL enforces strict JSON validation and won't allow invalid JSON to be inserted,
const context = createTestMigrationContext(dataSource); // so we skip this test for PostgreSQL
const workflow = await getWorkflowById(context, workflowIds.invalidJson); // eslint-disable-next-line n8n-local-rules/no-skipped-tests
const testFn = Container.get(GlobalConfig).database.type === 'postgresdb' ? it.skip : it;
testFn(
'should skip workflows with invalid JSON containing unescaped control characters',
async () => {
const context = createTestMigrationContext(dataSource);
const workflow = await getWorkflowById(context, workflowIds.invalidJson);
// Workflow should remain inactive and unchanged // Workflow should remain inactive and unchanged
expect(workflow?.active).toBeFalsy(); expect(workflow?.active).toBeFalsy();
expect(workflow?.activeVersionId).toBeNull(); expect(workflow?.activeVersionId).toBeNull();
// Verify the invalid JSON is still in the database (unchanged) // Verify the invalid JSON is still in the database (unchanged)
expect(workflow?.nodes).toContain('MEASUREMENT DETAILS'); expect(workflow?.nodes).toContain('MEASUREMENT DETAILS');
await context.queryRunner.release(); await context.queryRunner.release();
}); },
);
}); });
}); });

View File

@@ -43,6 +43,11 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
dataSource = Container.get(DataSource); dataSource = Container.get(DataSource);
// Clear database to start with clean slate
const context = createTestMigrationContext(dataSource);
await context.queryRunner.clearDatabase();
await context.queryRunner.release();
await initDbUpToMigration(MIGRATION_NAME); await initDbUpToMigration(MIGRATION_NAME);
}); });
@ -63,15 +68,19 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
const displayNameColumn = context.escape.columnName('displayName'); const displayNameColumn = context.escape.columnName('displayName');
const descriptionColumn = context.escape.columnName('description'); const descriptionColumn = context.escape.columnName('description');
const existingScope = await context.queryRunner.query( const existingScope = await context.runQuery<unknown[]>(
`SELECT ${slugColumn} FROM ${tableName} WHERE ${slugColumn} = ?`, `SELECT ${slugColumn} FROM ${tableName} WHERE ${slugColumn} = :slug`,
[scopeData.slug], { slug: scopeData.slug },
); );
if (existingScope.length === 0) { if (existingScope.length === 0) {
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${descriptionColumn}) VALUES (?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${descriptionColumn}) VALUES (:slug, :displayName, :description)`,
[scopeData.slug, scopeData.displayName, scopeData.description], {
slug: scopeData.slug,
displayName: scopeData.displayName,
description: scopeData.description,
},
); );
} }
} }
@ -90,9 +99,16 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
const systemRole = roleData.systemRole ?? false; const systemRole = roleData.systemRole ?? false;
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${roleTypeColumn}, ${systemRoleColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (?, ?, ?, ?, ?, ?)`, `INSERT INTO ${tableName} (${slugColumn}, ${displayNameColumn}, ${roleTypeColumn}, ${systemRoleColumn}, ${createdAtColumn}, ${updatedAtColumn}) VALUES (:slug, :displayName, :roleType, :systemRole, :createdAt, :updatedAt)`,
[roleData.slug, roleData.displayName, roleData.roleType, systemRole, new Date(), new Date()], {
slug: roleData.slug,
displayName: roleData.displayName,
roleType: roleData.roleType,
systemRole,
createdAt: new Date(),
updatedAt: new Date(),
},
); );
} }
@ -107,9 +123,9 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
const roleSlugColumn = context.escape.columnName('roleSlug'); const roleSlugColumn = context.escape.columnName('roleSlug');
const scopeSlugColumn = context.escape.columnName('scopeSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug');
await context.queryRunner.query( await context.runQuery(
`INSERT INTO ${tableName} (${roleSlugColumn}, ${scopeSlugColumn}) VALUES (?, ?)`, `INSERT INTO ${tableName} (${roleSlugColumn}, ${scopeSlugColumn}) VALUES (:roleSlug, :scopeSlug)`,
[roleScopeData.roleSlug, roleScopeData.scopeSlug], { roleSlug: roleScopeData.roleSlug, scopeSlug: roleScopeData.scopeSlug },
); );
} }
@ -124,12 +140,12 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
const roleSlugColumn = context.escape.columnName('roleSlug'); const roleSlugColumn = context.escape.columnName('roleSlug');
const scopeSlugColumn = context.escape.columnName('scopeSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug');
const scopes = await context.queryRunner.query( const scopes = await context.runQuery<Array<{ roleSlug: string; scopeSlug: string }>>(
`SELECT ${roleSlugColumn} as roleSlug, ${scopeSlugColumn} as scopeSlug FROM ${tableName} WHERE ${roleSlugColumn} = ?`, `SELECT ${roleSlugColumn}, ${scopeSlugColumn} FROM ${tableName} WHERE ${roleSlugColumn} = :roleSlug`,
[roleSlug], { roleSlug },
); );
return scopes; return scopes as RoleScopeRow[];
} }
/** /**
@ -143,12 +159,12 @@ describe('AddWorkflowPublishScopeToProjectRoles Migration', () => {
const roleSlugColumn = context.escape.columnName('roleSlug'); const roleSlugColumn = context.escape.columnName('roleSlug');
const scopeSlugColumn = context.escape.columnName('scopeSlug'); const scopeSlugColumn = context.escape.columnName('scopeSlug');
const roleScopeEntries = await context.queryRunner.query( const roleScopeEntries = await context.runQuery<Array<{ roleSlug: string; scopeSlug: string }>>(
`SELECT ${roleSlugColumn} as roleSlug, ${scopeSlugColumn} as scopeSlug FROM ${tableName} WHERE ${scopeSlugColumn} = ?`, `SELECT ${roleSlugColumn}, ${scopeSlugColumn} FROM ${tableName} WHERE ${scopeSlugColumn} = :scopeSlug`,
[scopeSlug], { scopeSlug },
); );
return roleScopeEntries; return roleScopeEntries as RoleScopeRow[];
} }
describe('up migration', () => { describe('up migration', () => {

View File

@ -34,11 +34,19 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
let dataSource: DataSource; let dataSource: DataSource;
beforeAll(async () => { beforeAll(async () => {
// Initialize DB connection without running migrations // Initialize DB connection
const dbConnection = Container.get(DbConnection); const dbConnection = Container.get(DbConnection);
await dbConnection.init(); await dbConnection.init();
dataSource = Container.get(DataSource); dataSource = Container.get(DataSource);
});
beforeEach(async () => {
// Clear database before each test to ensure isolation
// Note: Migration tests must run sequentially (maxWorkers: 1) to avoid conflicts
const context = createTestMigrationContext(dataSource);
await context.queryRunner.clearDatabase();
await context.queryRunner.release();
// Run migrations up to (but not including) target migration // Run migrations up to (but not including) target migration
await initDbUpToMigration(MIGRATION_NAME); await initDbUpToMigration(MIGRATION_NAME);
@ -69,10 +77,19 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const versionIdColumn = context.escape.columnName('versionId'); const versionIdColumn = context.escape.columnName('versionId');
const versionId = nanoid(); const versionId = nanoid();
const placeholders = getParamPlaceholders(context, 9); await context.runQuery(
await context.queryRunner.query( `INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesColumn}, ${connectionsColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${triggerCountColumn}, ${versionIdColumn}) VALUES (:id, :name, :active, :nodes, :connections, :createdAt, :updatedAt, :triggerCount, :versionId)`,
`INSERT INTO ${tableName} (${idColumn}, ${nameColumn}, ${activeColumn}, ${nodesColumn}, ${connectionsColumn}, ${createdAtColumn}, ${updatedAtColumn}, ${triggerCountColumn}, ${versionIdColumn}) VALUES (${placeholders})`, {
[workflowId, workflowName, false, '[]', '{}', new Date(), new Date(), 0, versionId], id: workflowId,
name: workflowName,
active: false,
nodes: '[]',
connections: '{}',
createdAt: new Date(),
updatedAt: new Date(),
triggerCount: 0,
versionId,
},
); );
} }
@ -92,10 +109,9 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const latestEventColumn = context.escape.columnName('latestEvent'); const latestEventColumn = context.escape.columnName('latestEvent');
const rootCountColumn = context.escape.columnName('rootCount'); const rootCountColumn = context.escape.columnName('rootCount');
const placeholders = getParamPlaceholders(context, 5); await context.runQuery(
await context.queryRunner.query( `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}) VALUES (:workflowId, :name, :count, :latestEvent, :rootCount)`,
`INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}) VALUES (${placeholders})`, { workflowId, name, count, latestEvent: new Date(), rootCount: 0 },
[workflowId, name, count, new Date(), 0],
); );
} }
@ -117,10 +133,9 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const rootCountColumn = context.escape.columnName('rootCount'); const rootCountColumn = context.escape.columnName('rootCount');
const workflowNameColumn = context.escape.columnName('workflowName'); const workflowNameColumn = context.escape.columnName('workflowName');
const placeholders = getParamPlaceholders(context, 6); await context.runQuery(
await context.queryRunner.query( `INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}, ${workflowNameColumn}) VALUES (:workflowId, :name, :count, :latestEvent, :rootCount, :workflowName)`,
`INSERT INTO ${tableName} (${workflowIdColumn}, ${nameColumn}, ${countColumn}, ${latestEventColumn}, ${rootCountColumn}, ${workflowNameColumn}) VALUES (${placeholders})`, { workflowId, name, count, latestEvent: new Date(), rootCount: 0, workflowName },
[workflowId, name, count, new Date(), 0, workflowName],
); );
} }
@ -138,10 +153,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const nameColumn = context.escape.columnName('name'); const nameColumn = context.escape.columnName('name');
const countColumn = context.escape.columnName('count'); const countColumn = context.escape.columnName('count');
const placeholder = getParamPlaceholder(context); // Cast count to integer for consistent return type
return await context.queryRunner.query( const countSelect = context.isPostgres ? `${countColumn}::int` : countColumn;
`SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count" FROM ${tableName} WHERE ${workflowIdColumn} = ${placeholder}`,
[workflowId], return await context.runQuery(
`SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn} FROM ${tableName} WHERE ${workflowIdColumn} = :workflowId`,
{ workflowId },
); );
} }
@ -160,10 +177,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const countColumn = context.escape.columnName('count'); const countColumn = context.escape.columnName('count');
const workflowNameColumn = context.escape.columnName('workflowName'); const workflowNameColumn = context.escape.columnName('workflowName');
const placeholder = getParamPlaceholder(context); // Cast count to integer for consistent return type
return await context.queryRunner.query( const countSelect = context.isPostgres ? `${countColumn}::int` : countColumn;
`SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count", ${workflowNameColumn} as "workflowName" FROM ${tableName} WHERE ${workflowIdColumn} = ${placeholder}`,
[workflowId], return await context.runQuery(
`SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn}, ${workflowNameColumn} FROM ${tableName} WHERE ${workflowIdColumn} = :workflowId`,
{ workflowId },
); );
} }
@ -182,10 +201,12 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
const countColumn = context.escape.columnName('count'); const countColumn = context.escape.columnName('count');
const workflowNameColumn = context.escape.columnName('workflowName'); const workflowNameColumn = context.escape.columnName('workflowName');
const placeholder = getParamPlaceholder(context); // Cast count to integer for consistent return type
return await context.queryRunner.query( const countSelect = context.isPostgres ? `${countColumn}::int` : countColumn;
`SELECT ${workflowIdColumn} as "workflowId", ${nameColumn} as "name", ${countColumn} as "count", ${workflowNameColumn} as "workflowName" FROM ${tableName} WHERE ${nameColumn} = ${placeholder}`,
[name], return await context.runQuery(
`SELECT ${workflowIdColumn}, ${nameColumn}, ${countSelect} as ${countColumn}, ${workflowNameColumn} FROM ${tableName} WHERE ${nameColumn} = :name`,
{ name },
); );
} }
@ -421,9 +442,7 @@ describe('ChangeWorkflowStatisticsFKToNoAction Migration', () => {
}); });
it('should delete orphaned statistics during rollback before restoring FK constraint', async () => { it('should delete orphaned statistics during rollback before restoring FK constraint', async () => {
// The database is already in post-migration state from the previous test // Run the migration first (beforeEach only runs up to, not including, the target migration)
// We need to rollback first, then run the migration again
await undoLastSingleMigration();
await runSingleMigration(MIGRATION_NAME); await runSingleMigration(MIGRATION_NAME);
// Create context AFTER migration runs // Create context AFTER migration runs

View File

@ -1,4 +1,5 @@
import { initDbUpToMigration, runSingleMigration } from '@n8n/backend-test-utils'; import { initDbUpToMigration, runSingleMigration } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import { DbConnection } from '@n8n/db'; import { DbConnection } from '@n8n/db';
import { Container } from '@n8n/di'; import { Container } from '@n8n/di';
import { DataSource } from '@n8n/typeorm'; import { DataSource } from '@n8n/typeorm';
@ -7,6 +8,21 @@ import { UnexpectedError } from 'n8n-workflow';
describe('Migration Test Helpers', () => { describe('Migration Test Helpers', () => {
let dataSource: DataSource; let dataSource: DataSource;
/**
* Get the properly qualified migrations table name for the current database
*/
function getMigrationsTableName(): string {
const globalConfig = Container.get(GlobalConfig);
const dbType = globalConfig.database.type;
const tablePrefix = globalConfig.database.tablePrefix;
if (dbType === 'postgresdb') {
const schema = globalConfig.database.postgresdb.schema;
return `${schema}."${tablePrefix}migrations"`;
}
return `"${tablePrefix}migrations"`;
}
beforeEach(async () => { beforeEach(async () => {
// Initialize connection without running migrations // Initialize connection without running migrations
const dbConnection = Container.get(DbConnection); const dbConnection = Container.get(DbConnection);
@ -15,6 +31,16 @@ describe('Migration Test Helpers', () => {
}); });
afterEach(async () => { afterEach(async () => {
// Clean up the migrations table
const globalConfig = Container.get(GlobalConfig);
if (globalConfig.database.type === 'postgresdb') {
try {
await dataSource.query(`TRUNCATE ${getMigrationsTableName()} CASCADE`);
} catch {
// Ignore errors if table doesn't exist
}
}
const dbConnection = Container.get(DbConnection); const dbConnection = Container.get(DbConnection);
await dbConnection.close(); await dbConnection.close();
}); });
@ -36,7 +62,9 @@ describe('Migration Test Helpers', () => {
console.log('Migrations executed up to ' + secondMigrationName); console.log('Migrations executed up to ' + secondMigrationName);
// Verify only first migration was executed // Verify only first migration was executed
const executed = await dataSource.query('SELECT * FROM migrations ORDER BY timestamp'); const executed = await dataSource.query(
`SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp`,
);
expect(executed).toHaveLength(1); expect(executed).toHaveLength(1);
expect(executed[0].name).toBe(migrations[0].name); expect(executed[0].name).toBe(migrations[0].name);
}); });
@ -60,7 +88,9 @@ describe('Migration Test Helpers', () => {
await runSingleMigration(secondMigrationName); await runSingleMigration(secondMigrationName);
const executed = await dataSource.query('SELECT * FROM migrations ORDER BY timestamp'); const executed = await dataSource.query(
`SELECT * FROM ${getMigrationsTableName()} ORDER BY timestamp`,
);
expect(executed).toHaveLength(2); expect(executed).toHaveLength(2);
expect(executed[0].name).toBe(migrations[0].name); expect(executed[0].name).toBe(migrations[0].name);
expect(executed[1].name).toBe(secondMigrationName); expect(executed[1].name).toBe(secondMigrationName);