feat(core): Add update_partial_workflow MCP tool (#29739)

This commit is contained in:
Ricardo Espinoza 2026-05-12 03:24:49 -04:00 committed by GitHub
parent 3dd134ab3c
commit b5bafc861e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 2084 additions and 469 deletions

View File

@ -123,6 +123,53 @@ export class ParseValidateHandler {
return allWarnings;
}
/**
 * Run the same graph + JSON validation passes that `parseAndValidate` runs,
 * but on a workflow that is already in JSON form (no parse step).
 *
 * Used by tools that mutate workflow JSON directly (e.g. partial update),
 * so the resulting state is checked against the same rules a code-rewrite
 * path would enforce. Never throws — every issue is collected into the
 * returned warnings array.
 */
validateJSON(json: WorkflowJSON): ValidationWarning[] {
	// An empty workflow has nothing to validate.
	if (json.nodes.length === 0) {
		return [];
	}

	const warnings: ValidationWarning[] = [];

	// Graph-level checks (node wiring) via the builder, then JSON-level
	// checks on the raw document.
	const graphResult = workflow.fromJSON(json).validate();
	const jsonResult = validateWorkflow(json);

	// Report in the same order as parseAndValidate: graph errors, graph
	// warnings, JSON errors, JSON warnings.
	const passes = [
		[graphResult.errors, 'GRAPH VALIDATION ERRORS', 'warn'],
		[graphResult.warnings, 'GRAPH VALIDATION WARNINGS', 'info'],
		[jsonResult.errors, 'JSON VALIDATION ERRORS', 'warn'],
		[jsonResult.warnings, 'JSON VALIDATION WARNINGS', 'info'],
	] as const;

	for (const [issues, label, severity] of passes) {
		this.collectValidationIssues(issues, warnings, label, severity);
	}

	return warnings;
}
/**
* Parse TypeScript code to WorkflowJSON and validate.
*

View File

@ -398,4 +398,89 @@ describe('ParseValidateHandler', () => {
expect(mockValidateWorkflow).not.toHaveBeenCalled();
});
});
describe('validateJSON', () => {
	const nonEmptyJson = {
		id: 'test',
		name: 'Test',
		nodes: [{ type: 'n8n-nodes-base.set' }],
		connections: {},
	} as unknown as WorkflowJSON;

	/** Build a fromJSON stub whose validate() reports the given issues. */
	const stubBuilder = (errors: unknown[] = [], warnings: unknown[] = []) => ({
		validate: jest.fn().mockReturnValue({ valid: errors.length === 0, errors, warnings }),
	});

	it('should return empty array when workflow has no nodes', () => {
		const emptyJson = { id: 'test', name: 'Test', nodes: [], connections: {} };
		expect(handler.validateJSON(emptyJson)).toHaveLength(0);
		// Neither validation pass should run for an empty workflow.
		expect(mockFromJSON).not.toHaveBeenCalled();
		expect(mockValidateWorkflow).not.toHaveBeenCalled();
	});

	it('should return empty array when no graph or JSON issues', () => {
		mockFromJSON.mockReturnValue(stubBuilder());
		mockValidateWorkflow.mockReturnValue({ valid: true, errors: [], warnings: [] });
		expect(handler.validateJSON(nonEmptyJson)).toHaveLength(0);
	});

	it('should collect graph errors and warnings', () => {
		mockFromJSON.mockReturnValue(
			stubBuilder(
				[{ code: 'GRAPH_ERR', message: 'Graph error', nodeName: 'A' }],
				[{ code: 'GRAPH_WARN', message: 'Graph warning' }],
			),
		);
		mockValidateWorkflow.mockReturnValue({ valid: true, errors: [], warnings: [] });
		const codes = handler.validateJSON(nonEmptyJson).map((w) => w.code);
		expect(codes).toEqual(['GRAPH_ERR', 'GRAPH_WARN']);
	});

	it('should collect JSON errors and warnings', () => {
		mockFromJSON.mockReturnValue(stubBuilder());
		mockValidateWorkflow.mockReturnValue({
			valid: false,
			errors: [{ code: 'JSON_ERR', message: 'JSON error' }],
			warnings: [{ code: 'JSON_WARN', message: 'JSON warning', nodeName: 'B' }],
		});
		const codes = handler.validateJSON(nonEmptyJson).map((w) => w.code);
		expect(codes).toEqual(['JSON_ERR', 'JSON_WARN']);
	});

	it('should combine graph and JSON validation issues into a single warnings array', () => {
		mockFromJSON.mockReturnValue(stubBuilder([{ code: 'GRAPH_ERR', message: 'Graph error' }]));
		mockValidateWorkflow.mockReturnValue({
			valid: false,
			errors: [{ code: 'JSON_ERR', message: 'JSON error' }],
			warnings: [],
		});

		const codes = handler.validateJSON(nonEmptyJson).map((w) => w.code);

		expect(codes).toEqual(['GRAPH_ERR', 'JSON_ERR']);
		// Both passes should have received the exact same workflow JSON.
		expect(mockFromJSON).toHaveBeenCalledWith(nonEmptyJson);
		expect(mockValidateWorkflow).toHaveBeenCalledWith(nonEmptyJson);
	});
});
});

View File

@ -1,19 +1,18 @@
import { mockInstance } from '@n8n/backend-test-utils';
import { SharedWorkflowRepository, User, WorkflowEntity } from '@n8n/db';
import type { INode } from 'n8n-workflow';
import { z } from 'zod';
import type { IConnections, INode } from 'n8n-workflow';
import { createUpdateWorkflowTool } from '../tools/workflow-builder/update-workflow.tool';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { CredentialsService } from '@/credentials/credentials.service';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { NodeTypes } from '@/node-types';
import { UrlService } from '@/services/url.service';
import { Telemetry } from '@/telemetry';
import { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import { WorkflowService } from '@/workflows/workflow.service';
// Mock credentials auto-assign
const mockAutoPopulateNodeCredentials = jest.fn();
jest.mock('../tools/workflow-builder/credentials-auto-assign', () => ({
autoPopulateNodeCredentials: (...args: unknown[]) =>
@ -21,63 +20,30 @@ jest.mock('../tools/workflow-builder/credentials-auto-assign', () => ({
stripNullCredentialStubs: jest.fn(),
}));
// Mock dynamic imports
const mockParseAndValidate = jest.fn();
const mockStripImportStatements = jest.fn((code: string) => code);
const mockValidateJSON = jest.fn().mockReturnValue([]);
jest.mock('@n8n/ai-workflow-builder', () => ({
ParseValidateHandler: jest.fn().mockImplementation(() => ({
parseAndValidate: mockParseAndValidate,
})),
stripImportStatements: (code: string) => mockStripImportStatements(code),
CODE_BUILDER_VALIDATE_TOOL: { toolName: 'validate_workflow_code', displayTitle: 'Validate' },
MCP_CREATE_WORKFLOW_FROM_CODE_TOOL: {
toolName: 'create_workflow_from_code',
displayTitle: 'Create Workflow from Code',
},
MCP_DELETE_WORKFLOW_TOOL: { toolName: 'delete_workflow', displayTitle: 'Delete Workflow' },
MCP_UPDATE_WORKFLOW_TOOL: {
toolName: 'update_workflow',
displayTitle: 'Update Workflow',
displayTitle: 'Updating workflow',
},
CODE_BUILDER_SEARCH_NODES_TOOL: { toolName: 'search', displayTitle: 'Search' },
CODE_BUILDER_GET_NODE_TYPES_TOOL: { toolName: 'get', displayTitle: 'Get' },
CODE_BUILDER_GET_SUGGESTED_NODES_TOOL: { toolName: 'suggest', displayTitle: 'Suggest' },
MCP_GET_SDK_REFERENCE_TOOL: { toolName: 'sdk_ref', displayTitle: 'SDK Ref' },
ParseValidateHandler: jest.fn().mockImplementation(() => ({
validateJSON: (json: unknown) => mockValidateJSON(json) as unknown,
})),
}));
const mockNodes: INode[] = [
{
id: 'node-1',
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
typeVersion: 1,
position: [0, 0],
parameters: {},
},
{
id: 'node-2',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 1,
position: [200, 0],
parameters: {},
},
];
const mockWorkflowJson = {
name: 'Updated Workflow',
nodes: mockNodes,
connections: {},
settings: { saveManualExecutions: true },
pinData: {},
meta: {},
};
/** Parse the first text content item from a tool result */
const parseResult = (result: { content: Array<{ type: string; text?: string }> }) =>
JSON.parse((result.content[0] as { type: 'text'; text: string }).text) as Record<string, unknown>;
const makeNode = (overrides: Partial<INode> = {}): INode => ({
id: 'node-id',
name: 'A',
type: 'n8n-nodes-base.set',
typeVersion: 1,
position: [0, 0],
parameters: {},
...overrides,
});
describe('update-workflow MCP tool', () => {
const user = Object.assign(new User(), { id: 'user-1' });
let workflowFinderService: WorkflowFinderService;
@ -91,17 +57,29 @@ describe('update-workflow MCP tool', () => {
let nodeTypes: ReturnType<typeof mockInstance<NodeTypes>>;
let collaborationService: CollaborationService;
const mockExistingWorkflow = Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'Existing Workflow',
nodes: [] as INode[],
settings: { availableInMCP: true },
});
const buildExistingWorkflow = () =>
Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'Existing',
settings: { availableInMCP: true },
nodes: [
makeNode({ id: 'a', name: 'A' }),
makeNode({
id: 'b',
name: 'B',
position: [200, 0],
parameters: { url: 'https://old', method: 'GET' },
}),
],
connections: {
A: { main: [[{ node: 'B', type: 'main', index: 0 }]] },
} as IConnections,
});
beforeEach(() => {
jest.clearAllMocks();
findWorkflowMock = jest.fn().mockResolvedValue(mockExistingWorkflow);
findWorkflowMock = jest.fn().mockResolvedValue(buildExistingWorkflow());
workflowFinderService = mockInstance(WorkflowFinderService, {
findWorkflowForUser: findWorkflowMock,
});
@ -110,15 +88,11 @@ describe('update-workflow MCP tool', () => {
.mockImplementation(async (_user, workflow, workflowId) =>
Object.assign(new WorkflowEntity(), { ...workflow, id: workflowId }),
);
workflowService = mockInstance(WorkflowService, {
update: updateMock,
});
workflowService = mockInstance(WorkflowService, { update: updateMock });
urlService = mockInstance(UrlService, {
getInstanceBaseUrl: jest.fn().mockReturnValue('https://n8n.example.com'),
});
telemetry = mockInstance(Telemetry, {
track: jest.fn(),
});
telemetry = mockInstance(Telemetry, { track: jest.fn() });
credentialsService = mockInstance(CredentialsService);
sharedWorkflowRepository = mockInstance(SharedWorkflowRepository, {
findOneOrFail: jest.fn().mockResolvedValue({ projectId: 'project-1' }),
@ -128,12 +102,8 @@ describe('update-workflow MCP tool', () => {
ensureWorkflowEditable: jest.fn().mockResolvedValue(undefined),
broadcastWorkflowUpdate: jest.fn().mockResolvedValue(undefined),
});
mockParseAndValidate.mockImplementation(async () => ({
workflow: { ...mockWorkflowJson, nodes: mockNodes.map((n) => ({ ...n })) },
}));
mockStripImportStatements.mockImplementation((code: string) => code);
mockAutoPopulateNodeCredentials.mockResolvedValue({ assignments: [], skippedHttpNodes: [] });
mockValidateJSON.mockReturnValue([]);
});
const createTool = () =>
@ -149,39 +119,29 @@ describe('update-workflow MCP tool', () => {
collaborationService,
);
// Helper to call handler with proper typing (optional fields default to undefined)
const callHandler = async (
input: {
workflowId: string;
code: string;
name?: string;
description?: string;
},
input: { workflowId: string; operations: unknown[] },
tool = createTool(),
) =>
await tool.handler(
{
workflowId: input.workflowId,
code: input.code,
name: input.name as string,
description: input.description as string,
operations: input.operations as never,
},
{} as never,
);
describe('smoke tests', () => {
test('creates tool with correct name, config, and handler', () => {
test('exposes correct name, schemas, and handler', () => {
const tool = createTool();
expect(tool.name).toBe('update_workflow');
expect(tool.config).toBeDefined();
expect(typeof tool.config.description).toBe('string');
expect(tool.config.inputSchema).toBeDefined();
expect(tool.config.outputSchema).toBeDefined();
expect(tool.config.annotations).toEqual(
expect.objectContaining({
readOnlyHint: false,
destructiveHint: true,
idempotentHint: true,
idempotentHint: false,
openWorldHint: false,
}),
);
@ -189,13 +149,36 @@ describe('update-workflow MCP tool', () => {
});
});
describe('handler tests', () => {
describe('handler', () => {
test('applies updateNodeParameters and saves the workflow', async () => {
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
const response = parseResult(result);
expect(result.isError).toBeUndefined();
expect(response.workflowId).toBe('wf-1');
expect(response.appliedOperations).toBe(1);
const saved = updateMock.mock.calls[0][1] as WorkflowEntity;
const b = saved.nodes.find((n) => n.name === 'B')!;
expect(b.parameters).toEqual({ url: 'https://new', method: 'GET' });
});
test('returns error when workflow has active write lock', async () => {
(collaborationService.ensureWorkflowEditable as jest.Mock).mockRejectedValue(
new Error('Cannot modify workflow while it is being edited by a user in the editor.'),
);
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
const response = parseResult(result);
expect(result.isError).toBe(true);
@ -203,138 +186,129 @@ describe('update-workflow MCP tool', () => {
expect(workflowService.update).not.toHaveBeenCalled();
});
test('successfully updates workflow and returns expected response', async () => {
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const response = parseResult(result);
expect(response.workflowId).toBe('wf-1');
expect(response.name).toBeDefined();
expect(response.nodeCount).toBe(2);
expect(response.url).toBe('https://n8n.example.com/workflow/wf-1');
expect(response.autoAssignedCredentials).toEqual([]);
expect(result.isError).toBeUndefined();
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
test('sets correct workflow entity defaults', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const passedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
expect(passedWorkflow).toBeInstanceOf(WorkflowEntity);
expect(passedWorkflow.meta).toEqual(
expect.objectContaining({
aiBuilderAssisted: true,
}),
);
});
test('ignores settings from parsed code', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const passedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
expect(passedWorkflow.settings).toBeUndefined();
});
test('uses provided name over code name', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...', name: 'My Custom Name' });
expect(updateMock.mock.calls[0][1].name).toBe('My Custom Name');
});
test('uses code name when no name provided', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
expect(updateMock.mock.calls[0][1].name).toBe('Updated Workflow');
});
test('includes description when provided', async () => {
await callHandler({
test('rejects op referencing a nonexistent node and does not save', async () => {
const result = await callHandler({
workflowId: 'wf-1',
code: 'const wf = ...',
description: 'A test workflow',
operations: [{ type: 'updateNodeParameters', nodeName: 'Nope', parameters: { url: 'x' } }],
});
expect(updateMock.mock.calls[0][1].description).toBe('A test workflow');
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain('Operation 0 failed');
expect(response.error).toContain("node 'Nope' not found");
expect(workflowService.update).not.toHaveBeenCalled();
});
test('omits description when not provided', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
expect(updateMock.mock.calls[0][1].description).toBeUndefined();
});
test('passes correct workflowId to service', async () => {
await callHandler({ workflowId: 'custom-wf-id', code: 'const wf = ...' });
test('passes correct workflowId and metadata to workflowService.update', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [{ type: 'setWorkflowMetadata', name: 'Renamed' }],
});
expect(workflowService.update).toHaveBeenCalledWith(
user,
expect.any(WorkflowEntity),
'custom-wf-id',
'wf-1',
{ aiBuilderAssisted: true, source: 'n8n-mcp' },
);
});
test('propagates errors from getMcpWorkflow', async () => {
findWorkflowMock.mockResolvedValue(null);
const result = await callHandler({ workflowId: 'wf-missing', code: 'const wf = ...' });
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toBe("Workflow not found or you don't have permission to access it.");
});
test('returns error when parse fails', async () => {
mockParseAndValidate.mockRejectedValue(new Error('Invalid syntax at line 5'));
const result = await callHandler({ workflowId: 'wf-1', code: 'bad code' });
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toBe('Invalid syntax at line 5');
});
test('includes SDK reference hint only for parse errors', async () => {
const parseError = new Error('Failed to parse generated workflow code: unexpected token');
parseError.name = 'WorkflowCodeParseError';
mockParseAndValidate.mockRejectedValue(parseError);
const result = await callHandler({ workflowId: 'wf-1', code: 'bad code' });
const response = parseResult(result);
expect(response.hint).toContain('sdk_ref');
});
test('does not include SDK reference hint for non-parse errors', async () => {
mockParseAndValidate.mockRejectedValue(new Error('Service unavailable'));
const result = await callHandler({ workflowId: 'wf-1', code: 'bad code' });
const response = parseResult(result);
expect(response.hint).toBeUndefined();
});
test('tracks telemetry on success', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
expect(telemetry.track).toHaveBeenCalledWith(
'User called mcp tool',
expect.objectContaining({
user_id: 'user-1',
tool_name: 'update_workflow',
results: expect.objectContaining({
success: true,
data: expect.objectContaining({
workflowId: 'wf-1',
nodeCount: 2,
}),
}),
}),
expect(updateMock.mock.calls[0][1].name).toBe('Renamed');
expect(updateMock.mock.calls[0][1].meta).toEqual(
expect.objectContaining({ aiBuilderAssisted: true, builderVariant: 'mcp' }),
);
});
test('assigns webhookId to webhook nodes before saving', async () => {
test('broadcasts workflow update on success', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [{ type: 'setWorkflowMetadata', name: 'Renamed' }],
});
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
test('only auto-assigns credentials for nodes added in this batch', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'addNode',
node: { name: 'C', type: 'n8n-nodes-base.slack', typeVersion: 1 },
},
{
type: 'updateNodeParameters',
nodeName: 'B',
parameters: { url: 'https://new' },
},
],
});
expect(mockAutoPopulateNodeCredentials).toHaveBeenCalledTimes(1);
const slimWorkflow = mockAutoPopulateNodeCredentials.mock.calls[0][0] as {
nodes: INode[];
};
expect(slimWorkflow.nodes.map((n) => n.name)).toEqual(['C']);
});
test('skips credential auto-assign entirely when no nodes are added', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
expect(mockAutoPopulateNodeCredentials).not.toHaveBeenCalled();
expect(sharedWorkflowRepository.findOneOrFail).not.toHaveBeenCalled();
});
test('reports auto-assigned credentials in the response', async () => {
mockAutoPopulateNodeCredentials.mockResolvedValue({
assignments: [{ nodeName: 'C', credentialName: 'My Slack', credentialType: 'slackApi' }],
skippedHttpNodes: [],
});
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'addNode',
node: { name: 'C', type: 'n8n-nodes-base.slack', typeVersion: 1 },
},
],
});
const response = parseResult(result);
expect(response.autoAssignedCredentials).toEqual([
{ nodeName: 'C', credentialName: 'My Slack', credentialType: 'slackApi' },
]);
});
test('reports skipped HTTP nodes in the note', async () => {
mockAutoPopulateNodeCredentials.mockResolvedValue({
assignments: [],
skippedHttpNodes: ['HTTP Request'],
});
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'addNode',
node: {
name: 'HTTP Request',
type: 'n8n-nodes-base.httpRequest',
typeVersion: 1,
},
},
],
});
const response = parseResult(result);
expect(response.note).toBe(
'HTTP Request nodes (HTTP Request) were skipped during credential auto-assignment. Their credentials must be configured manually.',
);
});
test('assigns webhookId to a webhook node added via addNode', async () => {
nodeTypes.getByNameAndVersion.mockImplementation(((type: string) => {
if (type === 'n8n-nodes-base.webhook') {
return { description: { webhooks: [{ httpMethod: 'GET', path: '' }] } };
@ -342,209 +316,312 @@ describe('update-workflow MCP tool', () => {
return { description: {} };
}) as typeof nodeTypes.getByNameAndVersion);
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'addNode',
node: { name: 'Webhook', type: 'n8n-nodes-base.webhook', typeVersion: 1 },
},
],
});
const savedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
const webhookNode = savedWorkflow.nodes.find(
(n: INode) => n.type === 'n8n-nodes-base.webhook',
);
const setNode = savedWorkflow.nodes.find((n: INode) => n.type === 'n8n-nodes-base.set');
expect(webhookNode!.webhookId).toBeDefined();
expect(typeof webhookNode!.webhookId).toBe('string');
expect(setNode!.webhookId).toBeUndefined();
const saved = updateMock.mock.calls[0][1] as WorkflowEntity;
const webhook = saved.nodes.find((n) => n.name === 'Webhook')!;
expect(webhook.webhookId).toBeDefined();
expect(typeof webhook.webhookId).toBe('string');
});
test('tracks telemetry on failure', async () => {
mockParseAndValidate.mockRejectedValue(new Error('Parse failed'));
test('returns error when workflow not found', async () => {
findWorkflowMock.mockResolvedValue(null);
await callHandler({ workflowId: 'wf-1', code: 'bad code' });
const result = await callHandler({
workflowId: 'wf-missing',
operations: [{ type: 'setWorkflowMetadata', name: 'x' }],
});
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toBe("Workflow not found or you don't have permission to access it.");
});
test('tracks telemetry on success with op metadata', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'setWorkflowMetadata', name: 'Renamed' },
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
expect(telemetry.track).toHaveBeenCalledWith(
'User called mcp tool',
expect.objectContaining({
user_id: 'user-1',
tool_name: 'update_workflow',
results: expect.objectContaining({
success: false,
error: 'Parse failed',
parameters: expect.objectContaining({
workflowId: 'wf-1',
opCount: 2,
opTypes: ['setWorkflowMetadata', 'updateNodeParameters'],
}),
results: expect.objectContaining({ success: true }),
}),
);
});
test('calls autoPopulateNodeCredentials with correct arguments', async () => {
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
test('tracks telemetry on failure', async () => {
const result = await callHandler({
workflowId: 'wf-1',
operations: [{ type: 'updateNodeParameters', nodeName: 'Nope', parameters: { url: 'x' } }],
});
expect(result.isError).toBe(true);
expect(mockAutoPopulateNodeCredentials).toHaveBeenCalledWith(
expect.any(WorkflowEntity),
user,
nodeTypes,
credentialsService,
'project-1',
expect(telemetry.track).toHaveBeenCalledWith(
'User called mcp tool',
expect.objectContaining({
tool_name: 'update_workflow',
results: expect.objectContaining({ success: false }),
}),
);
});
test('includes auto-assigned credentials in response', async () => {
mockAutoPopulateNodeCredentials.mockResolvedValue({
assignments: [
{ nodeName: 'Webhook', credentialName: 'My Cred', credentialType: 'webhookAuth' },
],
skippedHttpNodes: [],
});
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const response = parseResult(result);
expect(response.autoAssignedCredentials).toEqual([
{ nodeName: 'Webhook', credentialName: 'My Cred', credentialType: 'webhookAuth' },
]);
expect(response.note).toBeUndefined();
});
test('structuredContent conforms to declared outputSchema under strict validation', async () => {
// Regression for #28274: MCP publishes outputSchema with additionalProperties: false,
// so any field returned by the handler but missing from the schema breaks strict clients.
mockAutoPopulateNodeCredentials.mockResolvedValue({
assignments: [
{ nodeName: 'Webhook', credentialName: 'My Cred', credentialType: 'webhookAuth' },
],
skippedHttpNodes: [],
});
const tool = createTool();
const result = (await tool.handler(
{ workflowId: 'wf-1', code: 'const wf = ...' } as never,
{} as never,
)) as { structuredContent: unknown };
const envelopeShape = tool.config.outputSchema as z.ZodRawShape;
const itemsField = envelopeShape.autoAssignedCredentials as z.ZodArray<
z.ZodObject<z.ZodRawShape>
>;
const strictSchema = z
.object({
...envelopeShape,
autoAssignedCredentials: z.array(itemsField.element.strict()),
})
.strict();
expect(() => strictSchema.parse(result.structuredContent)).not.toThrow();
});
test('includes note about skipped HTTP nodes', async () => {
mockAutoPopulateNodeCredentials.mockResolvedValue({
assignments: [],
skippedHttpNodes: ['HTTP Request', 'HTTP Request1'],
});
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const response = parseResult(result);
expect(response.note).toBe(
'HTTP Request nodes (HTTP Request, HTTP Request1) were skipped during credential auto-assignment. Their credentials must be configured manually.',
);
});
describe('credential preservation from existing workflow', () => {
test('copies credentials from existing node when name and type match and updated node has none', async () => {
findWorkflowMock.mockResolvedValue(
Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'Existing Workflow',
settings: { availableInMCP: true },
nodes: [
{
id: 'node-2',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 1,
position: [200, 0] as [number, number],
parameters: {},
credentials: { setApi: { id: 'cred-1', name: 'My Set Cred' } },
},
] as INode[],
}),
);
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const savedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
const setNode = savedWorkflow.nodes.find((n: INode) => n.name === 'Set');
expect(setNode!.credentials).toEqual({ setApi: { id: 'cred-1', name: 'My Set Cred' } });
});
test('does not copy credentials when node type differs', async () => {
findWorkflowMock.mockResolvedValue(
Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'Existing Workflow',
settings: { availableInMCP: true },
nodes: [
{
id: 'node-2',
name: 'Set',
type: 'n8n-nodes-base.differentType',
typeVersion: 1,
position: [200, 0] as [number, number],
parameters: {},
credentials: { setApi: { id: 'cred-1', name: 'My Set Cred' } },
},
] as INode[],
}),
);
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const savedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
const setNode = savedWorkflow.nodes.find((n: INode) => n.name === 'Set');
expect(setNode!.credentials).toBeUndefined();
});
test('does not overwrite credentials already set on the updated node', async () => {
const newNodeCredentials = { setApi: { id: 'cred-new', name: 'New Cred' } };
mockParseAndValidate.mockResolvedValue({
workflow: {
...mockWorkflowJson,
nodes: mockNodes.map((n) =>
n.name === 'Set' ? { ...n, credentials: newNodeCredentials } : n,
),
},
describe('validation', () => {
test('passes the post-apply workflow JSON to validateJSON', async () => {
await callHandler({
workflowId: 'wf-1',
operations: [{ type: 'setWorkflowMetadata', name: 'Renamed' }],
});
expect(mockValidateJSON).toHaveBeenCalledTimes(1);
const json = mockValidateJSON.mock.calls[0][0] as {
name: string;
nodes: INode[];
connections: IConnections;
};
expect(json.name).toBe('Renamed');
expect(json.nodes.map((n) => n.name)).toEqual(['A', 'B']);
expect(json.connections).toEqual({
A: { main: [[{ node: 'B', type: 'main', index: 0 }]] },
});
});
test('surfaces validation warnings in the response', async () => {
mockValidateJSON.mockReturnValue([
{ code: 'GRAPH_ERR', message: 'unwired node', nodeName: 'B' },
{ code: 'JSON_WARN', message: 'parameter missing' },
]);
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
const response = parseResult(result);
expect(result.isError).toBeUndefined();
expect(response.validationWarnings).toEqual([
{ code: 'GRAPH_ERR', message: 'unwired node', nodeName: 'B' },
{ code: 'JSON_WARN', message: 'parameter missing' },
]);
});
test('does not block save when validation produces warnings', async () => {
mockValidateJSON.mockReturnValue([
{ code: 'GRAPH_ERR', message: 'unwired node', nodeName: 'B' },
]);
await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
expect(workflowService.update).toHaveBeenCalled();
});
test('returns an empty validationWarnings array when there are no issues', async () => {
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{ type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
],
});
const response = parseResult(result);
expect(response.validationWarnings).toEqual([]);
});
});
describe('credential validation', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockImplementation(((type: string) => {
if (type === 'n8n-nodes-base.slack') {
return { description: { credentials: [{ name: 'slackApi' }] } };
}
if (type === 'n8n-nodes-base.set') {
return { description: { credentials: [] } };
}
return { description: {} };
}) as typeof nodeTypes.getByNameAndVersion);
(credentialsService.getOne as jest.Mock).mockImplementation(async (_user, id: string) => {
if (id === 'cred-slack') return { id, name: 'My Slack', type: 'slackApi' };
if (id === 'cred-wrong-type') return { id, name: 'Wrong', type: 'discordApi' };
throw new NotFoundError(`Credential with ID "${id}" could not be found.`);
});
});
test('rejects setNodeCredential with a non-existent credential id', async () => {
findWorkflowMock.mockResolvedValue(
Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'Existing Workflow',
settings: { availableInMCP: true },
nodes: [
{
id: 'node-2',
name: 'Set',
type: 'n8n-nodes-base.set',
typeVersion: 1,
position: [200, 0] as [number, number],
parameters: {},
credentials: { setApi: { id: 'cred-old', name: 'Old Cred' } },
},
] as INode[],
Object.assign(buildExistingWorkflow(), {
nodes: [makeNode({ id: 's', name: 'Slack', type: 'n8n-nodes-base.slack' })],
connections: {},
}),
);
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'setNodeCredential',
nodeName: 'Slack',
credentialKey: 'slackApi',
credentialId: 'cred-missing',
credentialName: 'Whatever',
},
],
});
const savedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
const setNode = savedWorkflow.nodes.find((n: INode) => n.name === 'Set');
expect(setNode!.credentials).toEqual(newNodeCredentials);
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain('Operation 0 failed');
expect(response.error).toContain("credential 'cred-missing' not found");
expect(workflowService.update).not.toHaveBeenCalled();
});
test('handles existing workflow with no nodes without error', async () => {
// mockExistingWorkflow already has nodes: [] — verify no crash and no credentials copied
await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
test('rejects setNodeCredential when credential type does not match the key', async () => {
findWorkflowMock.mockResolvedValue(
Object.assign(buildExistingWorkflow(), {
nodes: [makeNode({ id: 's', name: 'Slack', type: 'n8n-nodes-base.slack' })],
connections: {},
}),
);
const savedWorkflow = updateMock.mock.calls[0][1] as WorkflowEntity;
for (const node of savedWorkflow.nodes) {
expect(node.credentials).toBeUndefined();
}
const result = await callHandler({
workflowId: 'wf-1',
operations: [
{
type: 'setNodeCredential',
nodeName: 'Slack',
credentialKey: 'slackApi',
credentialId: 'cred-wrong-type',
credentialName: 'Wrong',
},
],
});
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain("is type 'discordApi'");
expect(workflowService.update).not.toHaveBeenCalled();
});
// The node type's declared credential slots gate which keys may be set;
// 'n8n-nodes-base.set' declares no 'slackApi' slot, so the op must be rejected.
test('rejects setNodeCredential when the node type does not accept the credential key', async () => {
  findWorkflowMock.mockResolvedValue(
    Object.assign(buildExistingWorkflow(), {
      nodes: [makeNode({ id: 's', name: 'Setter', type: 'n8n-nodes-base.set' })],
      connections: {},
    }),
  );
  const result = await callHandler({
    workflowId: 'wf-1',
    operations: [
      {
        type: 'setNodeCredential',
        nodeName: 'Setter',
        credentialKey: 'slackApi',
        credentialId: 'cred-slack',
        credentialName: 'My Slack',
      },
    ],
  });
  const response = parseResult(result);
  expect(result.isError).toBe(true);
  expect(response.error).toContain("does not accept credential 'slackApi'");
  // Validation failure must prevent any save.
  expect(workflowService.update).not.toHaveBeenCalled();
});
// Happy path: the credential id exists, its type matches the key, and the
// node type accepts that credential key — so the workflow is saved.
test('accepts a setNodeCredential whose id, type and key all match', async () => {
  findWorkflowMock.mockResolvedValue(
    Object.assign(buildExistingWorkflow(), {
      nodes: [makeNode({ id: 's', name: 'Slack', type: 'n8n-nodes-base.slack' })],
      connections: {},
    }),
  );
  const result = await callHandler({
    workflowId: 'wf-1',
    operations: [
      {
        type: 'setNodeCredential',
        nodeName: 'Slack',
        credentialKey: 'slackApi',
        credentialId: 'cred-slack',
        credentialName: 'My Slack',
      },
    ],
  });
  expect(result.isError).toBeUndefined();
  expect(workflowService.update).toHaveBeenCalled();
});
// Credential references introduced via addNode are validated too, not just
// explicit setNodeCredential ops.
test('rejects addNode with an unknown credential id', async () => {
  const result = await callHandler({
    workflowId: 'wf-1',
    operations: [
      {
        type: 'addNode',
        node: {
          name: 'Slack',
          type: 'n8n-nodes-base.slack',
          typeVersion: 1,
          credentials: {
            slackApi: { id: 'cred-missing', name: 'Whatever' },
          },
        },
      },
    ],
  });
  const response = parseResult(result);
  expect(result.isError).toBe(true);
  expect(response.error).toContain("credential 'cred-missing' not found");
  expect(workflowService.update).not.toHaveBeenCalled();
});
// A credential stub without an id is left for auto-assignment later, so it
// must pass reference validation and the update must succeed.
test('allows addNode credentials with no id (auto-assign will pick one)', async () => {
  const result = await callHandler({
    workflowId: 'wf-1',
    operations: [
      {
        type: 'addNode',
        node: {
          name: 'Slack',
          type: 'n8n-nodes-base.slack',
          typeVersion: 1,
          credentials: { slackApi: { name: 'My Slack' } },
        },
      },
    ],
  });
  expect(result.isError).toBeUndefined();
  expect(workflowService.update).toHaveBeenCalled();
});
});
});

View File

@ -0,0 +1,702 @@
import type { IConnections, INode } from 'n8n-workflow';
import {
applyOperations,
partialUpdateOperationSchema,
type PartialUpdateOperation,
} from '../tools/workflow-builder/workflow-operations';
/** Build a minimal INode fixture; any field can be overridden per test. */
const makeNode = (overrides: Partial<INode> = {}): INode => {
  const defaults: INode = {
    id: 'node-id',
    name: 'A',
    type: 'n8n-nodes-base.set',
    typeVersion: 1,
    position: [0, 0],
    parameters: {},
  };
  return { ...defaults, ...overrides };
};
/** Two-node fixture: A → B on 'main'; B starts with a `url` parameter. */
const baseWorkflow = () => {
  const nodeA = makeNode({ id: 'a', name: 'A', position: [0, 0] });
  const nodeB = makeNode({
    id: 'b',
    name: 'B',
    position: [200, 0],
    parameters: { url: 'https://old' },
  });
  const connections = {
    A: { main: [[{ node: 'B', type: 'main', index: 0 }]] },
  } as IConnections;
  return { name: 'wf', description: 'd', nodes: [nodeA, nodeB], connections };
};
describe('applyOperations', () => {
  describe('updateNodeParameters', () => {
    // Default behavior: incoming keys are deep-merged into existing parameters.
    test('deep-merges by default', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return; // narrows the discriminated result union for TS
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        url: 'https://new',
      });
    });

    test('preserves untouched parameter keys when merging', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { url: 'https://old', method: 'GET' };
      const ops: PartialUpdateOperation[] = [
        { type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        url: 'https://new',
        method: 'GET',
      });
    });

    // replace=true swaps the whole parameters object instead of merging.
    test('replace=true overwrites parameters', () => {
      const ops: PartialUpdateOperation[] = [
        {
          type: 'updateNodeParameters',
          nodeName: 'B',
          parameters: { method: 'POST' },
          replace: true,
        },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        method: 'POST',
      });
    });

    test('rejects when node does not exist', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'updateNodeParameters', nodeName: 'Missing', parameters: { x: 1 } },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain("node 'Missing' not found");
      expect(result.opIndex).toBe(0);
    });

    // applyOperations must be pure: the input workflow object is never mutated.
    test('does not mutate input on success', () => {
      const wf = baseWorkflow();
      const before = JSON.stringify(wf);
      applyOperations(wf, [
        { type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
      ]);
      expect(JSON.stringify(wf)).toBe(before);
    });
  });
  describe('setNodeParameter', () => {
    // `path` is a JSON Pointer (RFC 6901) rooted at the node's parameters object.
    test('sets a top-level parameter via JSON Pointer', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/url', value: 'https://new' },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        url: 'https://new',
      });
    });

    test('preserves sibling keys at the leaf', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { url: 'https://old', method: 'GET' };
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/method', value: 'POST' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        url: 'https://old',
        method: 'POST',
      });
    });

    test('creates intermediate objects on demand for a deep path', () => {
      const ops: PartialUpdateOperation[] = [
        {
          type: 'setNodeParameter',
          nodeName: 'B',
          path: '/options/systemMessage',
          value: 'You are a helpful assistant',
        },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const params = result.workflow.nodes.find((n) => n.name === 'B')!.parameters as {
        url: string;
        options: { systemMessage: string };
      };
      expect(params.url).toBe('https://old');
      expect(params.options.systemMessage).toBe('You are a helpful assistant');
    });

    test('descends into existing nested objects without clobbering siblings', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { options: { mode: 'manual', timeout: 30 } };
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/options/timeout', value: 60 },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')!.parameters).toEqual({
        options: { mode: 'manual', timeout: 60 },
      });
    });

    test('accepts non-string values', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/retries', value: 3 },
        { type: 'setNodeParameter', nodeName: 'B', path: '/disabled', value: false },
        { type: 'setNodeParameter', nodeName: 'B', path: '/headers', value: { 'x-id': '1' } },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const params = result.workflow.nodes.find((n) => n.name === 'B')!.parameters;
      expect(params).toMatchObject({
        retries: 3,
        disabled: false,
        headers: { 'x-id': '1' },
      });
    });

    // RFC 6901: '~1' decodes to '/' and '~0' decodes to '~' inside a segment.
    test('decodes ~1 and ~0 escapes in path segments', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/a~1b', value: 1 },
        { type: 'setNodeParameter', nodeName: 'B', path: '/c~0d', value: 2 },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const params = result.workflow.nodes.find((n) => n.name === 'B')!.parameters as Record<
        string,
        unknown
      >;
      expect(params['a/b']).toBe(1);
      expect(params['c~d']).toBe(2);
    });

    test('rejects when node does not exist', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'Missing', path: '/x', value: 1 },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
    });

    test('rejects path that does not start with /', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: 'url', value: 'x' },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
    });

    // Prototype-pollution guard: '__proto__' may not appear as a path segment,
    // and Object.prototype must remain untouched afterwards.
    test('rejects unsafe segment in path', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/__proto__/polluted', value: true },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
      expect(({} as Record<string, unknown>).polluted).toBeUndefined();
    });

    test('sanitizes unsafe keys inside the value', () => {
      const ops: PartialUpdateOperation[] = [
        {
          type: 'setNodeParameter',
          nodeName: 'B',
          path: '/options',
          value: { __proto__: { polluted: true }, mode: 'manual' },
        },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const options = (
        result.workflow.nodes.find((n) => n.name === 'B')!.parameters as {
          options: Record<string, unknown>;
        }
      ).options;
      expect(options.mode).toBe('manual');
      expect(Object.prototype.hasOwnProperty.call(options, '__proto__')).toBe(false);
      expect(({} as Record<string, unknown>).polluted).toBeUndefined();
    });

    test('rejects descent through a non-object intermediate', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { options: 'not-an-object' };
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/options/mode', value: 'manual' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain('cannot descend');
    });

    // null is a legal stored value; descending through it must fail loudly
    // rather than silently replacing it with an object.
    test('rejects descent through a null intermediate (does not silently overwrite)', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { options: null };
      const ops: PartialUpdateOperation[] = [
        { type: 'setNodeParameter', nodeName: 'B', path: '/options/mode', value: 'manual' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain('cannot descend');
      expect(wf.nodes[1].parameters).toEqual({ options: null });
    });

    test('does not mutate input on success', () => {
      const wf = baseWorkflow();
      const before = JSON.stringify(wf);
      applyOperations(wf, [
        { type: 'setNodeParameter', nodeName: 'B', path: '/url', value: 'https://new' },
      ]);
      expect(JSON.stringify(wf)).toBe(before);
    });

    // An absent `value` must be rejected at the schema level (zod), before apply.
    test('schema rejects an omitted (undefined) value', () => {
      const parsed = partialUpdateOperationSchema.safeParse({
        type: 'setNodeParameter',
        nodeName: 'B',
        path: '/url',
      });
      expect(parsed.success).toBe(false);
    });

    test('rejects paths with empty segments', () => {
      for (const path of ['/foo//bar', '/foo/', '//bar']) {
        const result = applyOperations(baseWorkflow(), [
          { type: 'setNodeParameter', nodeName: 'B', path, value: 1 },
        ]);
        expect(result.success).toBe(false);
        if (result.success) continue;
        expect(result.error).toContain('invalid');
      }
    });

    test('rejects paths with invalid ~ escape sequences', () => {
      for (const path of ['/foo~2bar', '/foo~', '/~', '/foo/bar~']) {
        const result = applyOperations(baseWorkflow(), [
          { type: 'setNodeParameter', nodeName: 'B', path, value: 1 },
        ]);
        expect(result.success).toBe(false);
        if (result.success) continue;
        expect(result.error).toContain('invalid');
      }
    });

    // Array indices are deliberately unsupported; the error must say so clearly.
    test('fails clearly when descending through an array (indices not supported)', () => {
      const wf = baseWorkflow();
      wf.nodes[1].parameters = { values: [{ name: 'Content-Type', value: 'application/json' }] };
      const result = applyOperations(wf, [
        { type: 'setNodeParameter', nodeName: 'B', path: '/values/0/value', value: 'text/plain' },
      ]);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain('cannot descend');
    });
  });
  describe('addNode', () => {
    test('appends a new node and tracks it as added', () => {
      const ops: PartialUpdateOperation[] = [
        {
          type: 'addNode',
          node: {
            name: 'C',
            type: 'n8n-nodes-base.set',
            typeVersion: 1,
            parameters: { value: 1 },
          },
        },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes).toHaveLength(3);
      expect(result.workflow.nodes[2].name).toBe('C');
      // An id is generated when the caller does not supply one.
      expect(result.workflow.nodes[2].id).toBeTruthy();
      expect(result.addedNodeNames).toEqual(['C']);
    });

    test('uses provided position and id', () => {
      const ops: PartialUpdateOperation[] = [
        {
          type: 'addNode',
          node: {
            id: 'fixed-id',
            name: 'C',
            type: 'n8n-nodes-base.set',
            typeVersion: 1,
            position: [400, 100],
          },
        },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const c = result.workflow.nodes.find((n) => n.name === 'C')!;
      expect(c.id).toBe('fixed-id');
      expect(c.position).toEqual([400, 100]);
    });

    test('rejects when name already exists', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'addNode', node: { name: 'A', type: 'n8n-nodes-base.set', typeVersion: 1 } },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain("a node named 'A' already exists");
    });
  });

  describe('removeNode', () => {
    // Removing a node must also drop connections where it is source OR target.
    test('removes node and prunes inbound + outbound connections', () => {
      const wf = baseWorkflow();
      wf.connections.B = { main: [[{ node: 'A', type: 'main', index: 0 }]] };
      const ops: PartialUpdateOperation[] = [{ type: 'removeNode', nodeName: 'B' }];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes).toHaveLength(1);
      expect(result.workflow.connections).toEqual({});
    });

    test('rejects when node does not exist', () => {
      const result = applyOperations(baseWorkflow(), [{ type: 'removeNode', nodeName: 'Nope' }]);
      expect(result.success).toBe(false);
    });

    // addedNodeNames reflects the net effect of the batch, not every add op.
    test('untracks an added node when it is removed in the same batch', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'addNode', node: { name: 'C', type: 'n8n-nodes-base.set', typeVersion: 1 } },
        { type: 'removeNode', nodeName: 'C' },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.addedNodeNames).toEqual([]);
    });
  });

  describe('renameNode', () => {
    test('renames node and rewrites connections both as source and target', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'renameNode', oldName: 'B', newName: 'BRenamed' },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes.find((n) => n.name === 'B')).toBeUndefined();
      expect(result.workflow.nodes.find((n) => n.name === 'BRenamed')).toBeDefined();
      expect(result.workflow.connections.A.main[0]![0].node).toBe('BRenamed');
    });

    // The connections map is keyed by source node name, so the key itself
    // must be renamed along with any target references.
    test('renames source-key references too', () => {
      const wf = baseWorkflow();
      wf.connections.B = { main: [[{ node: 'A', type: 'main', index: 0 }]] };
      const ops: PartialUpdateOperation[] = [
        { type: 'renameNode', oldName: 'B', newName: 'BRenamed' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.connections.B).toBeUndefined();
      expect(result.workflow.connections.BRenamed).toEqual({
        main: [[{ node: 'A', type: 'main', index: 0 }]],
      });
    });

    test('no-op when oldName equals newName', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'renameNode', oldName: 'A', newName: 'A' },
      ]);
      expect(result.success).toBe(true);
    });

    test('rejects when newName collides', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'renameNode', oldName: 'A', newName: 'B' },
      ]);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.error).toContain("a node named 'B' already exists");
    });

    test('rejects when oldName does not exist', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'renameNode', oldName: 'X', newName: 'Y' },
      ]);
      expect(result.success).toBe(false);
    });
  });
  describe('addConnection', () => {
    test('adds a connection with default indices and main type', () => {
      const wf = baseWorkflow();
      wf.connections = {};
      const ops: PartialUpdateOperation[] = [{ type: 'addConnection', source: 'A', target: 'B' }];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.connections.A.main[0]).toEqual([
        { node: 'B', type: 'main', index: 0 },
      ]);
    });

    test('is idempotent — adding the same connection twice yields one entry', () => {
      const wf = baseWorkflow();
      wf.connections = {};
      const ops: PartialUpdateOperation[] = [
        { type: 'addConnection', source: 'A', target: 'B' },
        { type: 'addConnection', source: 'A', target: 'B' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.connections.A.main[0]).toHaveLength(1);
    });

    // Output arrays are positional: unused lower output indices get null padding.
    test('pads earlier output indices with null when adding to a higher index', () => {
      const wf = baseWorkflow();
      wf.connections = {};
      const ops: PartialUpdateOperation[] = [
        { type: 'addConnection', source: 'A', target: 'B', sourceIndex: 2 },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.connections.A.main).toEqual([
        null,
        null,
        [{ node: 'B', type: 'main', index: 0 }],
      ]);
    });

    test('rejects when source node is missing', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'addConnection', source: 'Missing', target: 'B' },
      ]);
      expect(result.success).toBe(false);
    });

    test('rejects when target node is missing', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'addConnection', source: 'A', target: 'Missing' },
      ]);
      expect(result.success).toBe(false);
    });
  });

  describe('removeConnection', () => {
    // Removing the last connection must leave no empty arrays/objects behind.
    test('removes the matching connection and prunes empty shapes', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'removeConnection', source: 'A', target: 'B' },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.connections).toEqual({});
    });

    test('rejects when no such connection exists', () => {
      const ops: PartialUpdateOperation[] = [
        { type: 'removeConnection', source: 'A', target: 'B', sourceIndex: 5 },
      ];
      const result = applyOperations(baseWorkflow(), ops);
      expect(result.success).toBe(false);
    });
  });
  describe('setNodeCredential', () => {
    // Setting one credential key must not clobber other entries on the node.
    test('sets credentials and preserves other credential entries', () => {
      const wf = baseWorkflow();
      wf.nodes[0].credentials = { other: { id: 'o1', name: 'OtherCred' } };
      const ops: PartialUpdateOperation[] = [
        {
          type: 'setNodeCredential',
          nodeName: 'A',
          credentialKey: 'slackApi',
          credentialId: 'cred-1',
          credentialName: 'My Slack',
        },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes[0].credentials).toEqual({
        other: { id: 'o1', name: 'OtherCred' },
        slackApi: { id: 'cred-1', name: 'My Slack' },
      });
    });
  });

  describe('setNodePosition / setNodeDisabled', () => {
    test('updates position', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'setNodePosition', nodeName: 'A', position: [123, 456] },
      ]);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes[0].position).toEqual([123, 456]);
    });

    test('updates disabled flag', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'setNodeDisabled', nodeName: 'A', disabled: true },
      ]);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.nodes[0].disabled).toBe(true);
    });
  });

  describe('setWorkflowMetadata', () => {
    test('updates name and description', () => {
      const result = applyOperations(baseWorkflow(), [
        { type: 'setWorkflowMetadata', name: 'New', description: 'updated' },
      ]);
      expect(result.success).toBe(true);
      if (!result.success) return;
      expect(result.workflow.name).toBe('New');
      expect(result.workflow.description).toBe('updated');
    });
  });

  describe('atomicity', () => {
    // A failing op anywhere in the batch must leave the input workflow untouched.
    test('rolls back the whole batch if any op fails', () => {
      const wf = baseWorkflow();
      const before = JSON.stringify(wf);
      const ops: PartialUpdateOperation[] = [
        { type: 'updateNodeParameters', nodeName: 'B', parameters: { url: 'https://new' } },
        { type: 'removeNode', nodeName: 'Missing' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
      if (result.success) return;
      expect(result.opIndex).toBe(1);
      expect(JSON.stringify(wf)).toBe(before);
    });
  });
  describe('object key safety', () => {
    // These tests guard against prototype pollution via user-controlled keys
    // and names ('__proto__', 'constructor') in every operation type.
    test('strips unsafe keys from updateNodeParameters merge', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        {
          type: 'updateNodeParameters',
          nodeName: 'B',
          parameters: { __proto__: { polluted: true }, url: 'https://safe' } as Record<
            string,
            unknown
          >,
        },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      // Object.prototype must be untouched after the merge.
      expect(({} as Record<string, unknown>).polluted).toBeUndefined();
      const params = result.workflow.nodes.find((n) => n.name === 'B')!.parameters as Record<
        string,
        unknown
      >;
      expect(params.url).toBe('https://safe');
      expect(Object.prototype.hasOwnProperty.call(params, '__proto__')).toBe(false);
    });

    test('strips unsafe keys from nested parameters', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        {
          type: 'updateNodeParameters',
          nodeName: 'B',
          parameters: {
            options: { constructor: { polluted: true }, mode: 'manual' },
          } as Record<string, unknown>,
        },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(true);
      if (!result.success) return;
      const params = result.workflow.nodes.find((n) => n.name === 'B')!.parameters as Record<
        string,
        Record<string, unknown>
      >;
      expect(params.options.mode).toBe('manual');
      expect(Object.prototype.hasOwnProperty.call(params.options, 'constructor')).toBe(false);
    });

    test('rejects addNode with unsafe name', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        {
          type: 'addNode',
          node: { name: '__proto__', type: 'n8n-nodes-base.set', typeVersion: 1 },
        },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
    });

    test('rejects renameNode to unsafe name', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        { type: 'renameNode', oldName: 'A', newName: 'constructor' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
    });

    // Even when a node with the unsafe name exists, using it as a connection
    // source (a map key) must be refused.
    test('rejects addConnection with unsafe source', () => {
      const wf = baseWorkflow();
      wf.nodes.push(makeNode({ id: 'p', name: '__proto__' }));
      const ops: PartialUpdateOperation[] = [
        { type: 'addConnection', source: '__proto__', target: 'B' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
    });

    test('rejects addConnection with unsafe connectionType', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        { type: 'addConnection', source: 'A', target: 'B', connectionType: '__proto__' },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
    });

    test('rejects setNodeCredential with unsafe credentialKey', () => {
      const wf = baseWorkflow();
      const ops: PartialUpdateOperation[] = [
        {
          type: 'setNodeCredential',
          nodeName: 'B',
          credentialKey: '__proto__',
          credentialId: 'c1',
          credentialName: 'cred',
        },
      ];
      const result = applyOperations(wf, ops);
      expect(result.success).toBe(false);
    });
  });
});

View File

@ -0,0 +1,143 @@
import type { User } from '@n8n/db';
import type { IWorkflowBase } from 'n8n-workflow';
import type { CredentialsService } from '@/credentials/credentials.service';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import type { NodeTypes } from '@/node-types';
import type { PartialUpdateOperation } from './workflow-operations';
/** Failed validation: identifies the offending operation and a user-facing message. */
export interface CredentialValidationFailure {
  ok: false;
  // Index into the operations array of the op that failed.
  opIndex: number;
  // Pre-formatted `Operation N failed: ...` message.
  error: string;
}

/** Every credential reference in the batch checked out. */
export interface CredentialValidationSuccess {
  ok: true;
}

/** Discriminated on `ok`. */
export type CredentialValidationResult = CredentialValidationSuccess | CredentialValidationFailure;

/** Minimal node identity needed to resolve a node-type description. */
interface NodeMeta {
  type: string;
  typeVersion: number;
}

// Wraps a message in the standard `Operation N failed: ...` envelope the handler surfaces.
const fail = (opIndex: number, message: string): CredentialValidationFailure => ({
  ok: false,
  opIndex,
  error: `Operation ${opIndex} failed: ${message}`,
});
/**
 * Check every credential reference introduced by `operations` against the
 * caller's accessible credentials and against the target node type's declared
 * credential slots.
 *
 * Only references touched by ops in this batch are validated — credential
 * entries already present on untouched nodes are ignored, so a stale
 * reference elsewhere cannot block an unrelated edit.
 *
 * Read-only: performs credential lookups and node-type metadata reads only.
 * Stops at the first problem and reports the offending operation index via
 * the standard `Operation N failed: ...` envelope.
 *
 * @param operations - the partial-update batch to vet
 * @param existingWorkflow - current stored workflow (source of node-type metadata)
 * @param user - caller, used for the credential access check
 * @param credentialsService - service used to resolve credential ids
 * @param nodeTypes - registry used to resolve node-type descriptions
 */
export async function validateCredentialReferences(
	operations: PartialUpdateOperation[],
	existingWorkflow: IWorkflowBase,
	user: User,
	credentialsService: CredentialsService,
	nodeTypes: NodeTypes,
): Promise<CredentialValidationResult> {
	// Node name -> type metadata, kept in sync as ops add/rename/remove nodes.
	const metaByName = new Map<string, NodeMeta>();
	for (const { name, type, typeVersion } of existingWorkflow.nodes) {
		metaByName.set(name, { type, typeVersion });
	}

	// Memoized credential lookups; 'not-found' is cached too so each id hits
	// the service at most once per batch.
	const cache = new Map<string, { type: string } | 'not-found'>();
	const fetchCredential = async (credentialId: string) => {
		const hit = cache.get(credentialId);
		if (hit) return hit;
		let entry: { type: string } | 'not-found';
		try {
			const { type } = await credentialsService.getOne(user, credentialId, false);
			entry = { type };
		} catch (error) {
			// Only "not found" is an expected outcome; anything else propagates.
			if (!(error instanceof NotFoundError)) throw error;
			entry = 'not-found';
		}
		cache.set(credentialId, entry);
		return entry;
	};

	// Validate a single (node, key, id) credential reference; null means OK.
	const verifyReference = async (
		opIndex: number,
		nodeMeta: NodeMeta,
		credentialKey: string,
		credentialId: string,
	): Promise<CredentialValidationFailure | null> => {
		let description;
		try {
			({ description } = nodeTypes.getByNameAndVersion(nodeMeta.type, nodeMeta.typeVersion));
		} catch {
			// Unknown node type — leave that to graph validation instead of failing here.
			return null;
		}
		const slot = description.credentials?.find((c) => c.name === credentialKey);
		if (slot === undefined) {
			return fail(
				opIndex,
				`node type '${nodeMeta.type}' does not accept credential '${credentialKey}'`,
			);
		}
		const credential = await fetchCredential(credentialId);
		if (credential === 'not-found') {
			return fail(opIndex, `credential '${credentialId}' not found or not accessible`);
		}
		if (credential.type !== credentialKey) {
			return fail(
				opIndex,
				`credential '${credentialId}' is type '${credential.type}' but '${credentialKey}' is expected`,
			);
		}
		return null;
	};

	for (const [index, op] of operations.entries()) {
		switch (op.type) {
			case 'addNode': {
				const nodeMeta: NodeMeta = { type: op.node.type, typeVersion: op.node.typeVersion };
				for (const [key, ref] of Object.entries(op.node.credentials ?? {})) {
					// Entries without an id are auto-assigned later — nothing to validate yet.
					if (!ref.id) continue;
					const failure = await verifyReference(index, nodeMeta, key, ref.id);
					if (failure) return failure;
				}
				nodeMeta && metaByName.set(op.node.name, nodeMeta);
				break;
			}
			case 'renameNode': {
				const meta = metaByName.get(op.oldName);
				if (meta) {
					metaByName.delete(op.oldName);
					metaByName.set(op.newName, meta);
				}
				break;
			}
			case 'removeNode':
				metaByName.delete(op.nodeName);
				break;
			case 'setNodeCredential': {
				const meta = metaByName.get(op.nodeName);
				if (meta) {
					const failure = await verifyReference(index, meta, op.credentialKey, op.credentialId);
					if (failure) return failure;
				}
				break;
			}
			default:
				break;
		}
	}

	return { ok: true };
}

View File

@ -38,7 +38,7 @@ To build n8n workflows, follow these steps in order:
7. Create: Call ${MCP_CREATE_WORKFLOW_FROM_CODE_TOOL.toolName} with the validated code to save the workflow to n8n. Include a short \`description\` (1-2 sentences) summarizing what the workflow does — this helps users find and understand their workflows.
8. Update: Call ${MCP_UPDATE_WORKFLOW_TOOL.toolName} with the workflow ID and validated code. Follow steps 2-6 to prepare the new code, then call update instead of create.
8. Update: Call ${MCP_UPDATE_WORKFLOW_TOOL.toolName} with the workflow ID and a list of operations (addNode, removeNode, updateNodeParameters, renameNode, addConnection, removeConnection, setNodeCredential, setNodePosition, setNodeDisabled, setWorkflowMetadata). The whole batch is atomic: if any op fails the workflow is unchanged.
9. Archive: Call ${MCP_ARCHIVE_WORKFLOW_TOOL.toolName} with the workflow ID.`;

View File

@ -1,10 +1,18 @@
import { type User, type SharedWorkflowRepository, WorkflowEntity } from '@n8n/db';
import type { WorkflowJSON } from '@n8n/workflow-sdk';
import z from 'zod';
import { USER_CALLED_MCP_TOOL_EVENT } from '../../mcp.constants';
import type { ToolDefinition, UserCalledMCPToolEventPayload } from '../../mcp.types';
import { CODE_BUILDER_VALIDATE_TOOL, MCP_UPDATE_WORKFLOW_TOOL } from './constants';
import { autoPopulateNodeCredentials, stripNullCredentialStubs } from './credentials-auto-assign';
import { MCP_UPDATE_WORKFLOW_TOOL } from './constants';
import { validateCredentialReferences } from './credential-validation';
import { autoPopulateNodeCredentials } from './credentials-auto-assign';
import {
applyOperations,
partialUpdateOperationSchema,
toWorkflowSlice,
type PartialUpdateOperation,
} from './workflow-operations';
import type { CollaborationService } from '@/collaboration/collaboration.service';
import type { CredentialsService } from '@/credentials/credentials.service';
@ -15,61 +23,59 @@ import { resolveNodeWebhookIds } from '@/workflow-helpers';
import type { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import type { WorkflowService } from '@/workflows/workflow.service';
import { getMcpWorkflow, getSdkReferenceHint } from '../workflow-validation.utils';
import { getMcpWorkflow } from '../workflow-validation.utils';
const MAX_OPERATIONS_PER_CALL = 100;
const inputSchema = {
workflowId: z.string().describe('The ID of the workflow to update'),
code: z
.string()
workflowId: z.string().describe('The ID of the workflow to update.'),
operations: z
.array(partialUpdateOperationSchema)
.min(1)
.max(MAX_OPERATIONS_PER_CALL)
.describe(
`Full TypeScript/JavaScript workflow code using the n8n Workflow SDK. Must be validated first with ${CODE_BUILDER_VALIDATE_TOOL.toolName}.`,
),
name: z
.string()
.max(128)
.optional()
.describe('Optional workflow name. If not provided, uses the name from the code.'),
description: z
.string()
.max(255)
.optional()
.describe(
'Short workflow description summarizing what it does (1-2 sentences, max 255 chars).',
`Ordered list of operations to apply (max ${MAX_OPERATIONS_PER_CALL}). Operations are applied atomically: if any operation fails (e.g. node not found, duplicate name), the whole batch is rejected and no changes are saved.`,
),
} satisfies z.ZodRawShape;
const outputSchema = {
workflowId: z.string().describe('The ID of the updated workflow'),
name: z.string().describe('The name of the updated workflow'),
nodeCount: z.number().describe('The number of nodes in the workflow'),
url: z.string().describe('The URL to open the workflow in n8n'),
workflowId: z.string(),
name: z.string(),
nodeCount: z.number(),
url: z.string(),
appliedOperations: z.number().describe('Number of operations applied.'),
autoAssignedCredentials: z
.array(
z.object({
nodeName: z.string().describe('The name of the node that had credentials auto-assigned'),
credentialName: z.string().describe('The name of the credential that was auto-assigned'),
credentialType: z.string().describe('The credential type that was auto-assigned'),
nodeName: z.string(),
credentialName: z.string(),
credentialType: z.string(),
}),
)
.describe('Credentials auto-assigned to nodes that were added in this update.'),
validationWarnings: z
.array(
z.object({
code: z.string(),
message: z.string(),
nodeName: z.string().optional(),
}),
)
.describe('List of credentials that were automatically assigned to nodes'),
note: z
.string()
.optional()
.describe(
'Additional notes about the workflow update, such as any nodes that were skipped during credential auto-assignment.',
),
hint: z
.string()
.optional()
.describe(
'Actionable hint for recovering from the error. When present, follow the suggested action before retrying.',
'Graph and JSON validation warnings on the resulting workflow. Use these to self-correct on the next call.',
),
note: z.string().optional(),
} satisfies z.ZodRawShape;
/**
* MCP tool that updates a workflow in n8n from validated SDK code.
* Parses the code, validates it, and updates the existing workflow.
* Only workflows that are available in MCP can be updated.
* MCP tool that updates a workflow by applying a small list of named operations
* (addNode, removeNode, updateNodeParameters, addConnection, ) directly to the
* stored JSON. The agent emits a tiny diff per call instead of re-sending the
* full SDK code, which keeps output-token cost roughly constant per edit.
*
* Graph + JSON validation runs on the resulting workflow before save, so the
* end-state safety net matches the create-from-code path; only the
* TS-code JSON parse step is skipped.
*/
export const createUpdateWorkflowTool = (
user: User,
@ -84,36 +90,36 @@ export const createUpdateWorkflowTool = (
): ToolDefinition<typeof inputSchema> => ({
name: MCP_UPDATE_WORKFLOW_TOOL.toolName,
config: {
description: `Update an existing workflow in n8n from validated SDK code. Parses the code into a workflow and saves the changes. Always validate with ${CODE_BUILDER_VALIDATE_TOOL.toolName} first.`,
description:
'Apply a small list of operations to an existing workflow (see the operations input schema for the supported op types). The whole batch is atomic: if any op fails the workflow is left unchanged.',
inputSchema,
outputSchema,
annotations: {
title: MCP_UPDATE_WORKFLOW_TOOL.displayTitle,
readOnlyHint: false,
destructiveHint: true,
idempotentHint: true,
idempotentHint: false,
openWorldHint: false,
},
},
handler: async ({
workflowId,
code,
name,
description,
operations,
}: {
workflowId: string;
code: string;
name?: string;
description?: string;
operations: PartialUpdateOperation[];
}) => {
const telemetryPayload: UserCalledMCPToolEventPayload = {
user_id: user.id,
tool_name: MCP_UPDATE_WORKFLOW_TOOL.toolName,
parameters: { workflowId, codeLength: code.length, hasName: !!name },
parameters: {
workflowId,
opCount: operations.length,
opTypes: operations.map((op) => op.type),
},
};
try {
// Fetch the workflow to check if it's available in MCP
const existingWorkflow = await getMcpWorkflow(
workflowId,
user,
@ -123,58 +129,73 @@ export const createUpdateWorkflowTool = (
await collaborationService.ensureWorkflowEditable(existingWorkflow.id);
const { ParseValidateHandler, stripImportStatements } = await import(
'@n8n/ai-workflow-builder'
const result = applyOperations(toWorkflowSlice(existingWorkflow), operations);
if (!result.success) {
throw new Error(result.error);
}
const credentialCheck = await validateCredentialReferences(
operations,
existingWorkflow,
user,
credentialsService,
nodeTypes,
);
const handler = new ParseValidateHandler({ generatePinData: false });
const strippedCode = stripImportStatements(code);
const result = await handler.parseAndValidate(strippedCode);
const workflowJson = result.workflow;
if (!credentialCheck.ok) {
throw new Error(credentialCheck.error);
}
const workflowUpdateData = new WorkflowEntity();
Object.assign(workflowUpdateData, {
name: name ?? workflowJson.name,
...(description !== undefined ? { description } : {}),
nodes: workflowJson.nodes,
connections: workflowJson.connections,
pinData: workflowJson.pinData,
meta: { ...workflowJson.meta, aiBuilderAssisted: true, builderVariant: 'mcp' },
name: result.workflow.name,
...(result.workflow.description !== undefined
? { description: result.workflow.description }
: {}),
nodes: result.workflow.nodes,
connections: result.workflow.connections,
meta: {
...(existingWorkflow.meta ?? {}),
aiBuilderAssisted: true,
builderVariant: 'mcp',
},
});
resolveNodeWebhookIds(workflowUpdateData, nodeTypes);
stripNullCredentialStubs(workflowUpdateData.nodes);
let credentialAssignments: Array<{
nodeName: string;
credentialName: string;
credentialType: string;
}> = [];
let skippedHttpNodes: string[] = [];
// Preserve user-configured credentials from the existing workflow.
// Match nodes by name + type so that auto-assign skips them.
const existingCredsByNode = new Map(
existingWorkflow.nodes.map((n) => [n.name, { type: n.type, credentials: n.credentials }]),
);
for (const node of workflowUpdateData.nodes) {
if (!node.credentials) {
const existing = existingCredsByNode.get(node.name);
if (existing?.type === node.type && existing.credentials) {
node.credentials = { ...existing.credentials };
}
}
}
if (result.addedNodeNames.length > 0) {
const addedNodeSet = new Set(result.addedNodeNames);
const addedNodes = workflowUpdateData.nodes.filter((n) => addedNodeSet.has(n.name));
const sharedWorkflow = await sharedWorkflowRepository.findOneOrFail({
where: { workflowId, role: 'workflow:owner' },
select: ['projectId'],
});
// Resolve the project ID from the workflow's owner relationship
const sharedWorkflow = await sharedWorkflowRepository.findOneOrFail({
where: { workflowId, role: 'workflow:owner' },
select: ['projectId'],
});
const { assignments: credentialAssignments, skippedHttpNodes } =
await autoPopulateNodeCredentials(
workflowUpdateData,
const autoAssign = await autoPopulateNodeCredentials(
{ ...workflowUpdateData, nodes: addedNodes },
user,
nodeTypes,
credentialsService,
sharedWorkflow.projectId,
);
credentialAssignments = autoAssign.assignments;
skippedHttpNodes = autoAssign.skippedHttpNodes;
}
const { ParseValidateHandler } = await import('@n8n/ai-workflow-builder');
const validator = new ParseValidateHandler({ generatePinData: false });
const validationWarnings = validator.validateJSON({
name: workflowUpdateData.name,
nodes: workflowUpdateData.nodes,
connections: workflowUpdateData.connections,
} as unknown as WorkflowJSON);
const updatedWorkflow = await workflowService.update(user, workflowUpdateData, workflowId, {
aiBuilderAssisted: true,
@ -200,7 +221,9 @@ export const createUpdateWorkflowTool = (
name: updatedWorkflow.name,
nodeCount: updatedWorkflow.nodes.length,
url: workflowUrl,
appliedOperations: operations.length,
autoAssignedCredentials: credentialAssignments,
validationWarnings,
note: skippedHttpNodes.length
? `HTTP Request nodes (${skippedHttpNodes.join(', ')}) were skipped during credential auto-assignment. Their credentials must be configured manually.`
: undefined,
@ -219,8 +242,7 @@ export const createUpdateWorkflowTool = (
};
telemetry.track(USER_CALLED_MCP_TOOL_EVENT, telemetryPayload);
const hint = getSdkReferenceHint(error);
const output = { error: errorMessage, ...(hint ? { hint } : {}) };
const output = { error: errorMessage };
return {
content: [{ type: 'text', text: JSON.stringify(output, null, 2) }],

View File

@ -0,0 +1,539 @@
import type {
IConnection,
IConnections,
INode,
INodeParameters,
IWorkflowBase,
NodeConnectionType,
} from 'n8n-workflow';
import { isSafeObjectProperty, NodeConnectionTypes } from 'n8n-workflow';
import { v4 as uuid } from 'uuid';
import { z } from 'zod';
// Canvas position tuple schema. Declared as a factory so every call site gets
// a fresh schema instance (the .transform narrows number[] to a [x, y] tuple).
const positionSchema = () =>
  z
    .array(z.number())
    .length(2)
    .transform((v): [number, number] => [v[0], v[1]])
    .describe('Canvas position as [x, y]');

// Node credentials map: credential key (e.g. "slackApi") -> reference with an
// optional id and a required display name.
const credentialsSchema = z.record(
  z.string(),
  z.object({ id: z.string().optional(), name: z.string() }),
);
/**
 * Discriminated union of every partial-update operation the MCP tool accepts.
 * `type` is the discriminator; each variant carries only the fields it needs.
 * The `.describe()` strings double as the agent-facing documentation exposed
 * through the tool's input schema, so keep them precise.
 */
export const partialUpdateOperationSchema = z.discriminatedUnion('type', [
  // Merge (default) or wholesale-replace a node's parameters object.
  z.object({
    type: z.literal('updateNodeParameters'),
    nodeName: z.string().describe('Name of the existing node to update.'),
    parameters: z
      .record(z.string(), z.unknown())
      .describe('Parameter object to merge into (or replace) the node parameters.'),
    replace: z
      .boolean()
      .optional()
      .describe(
        'If true, replace the node parameters entirely with `parameters`. If false or omitted, deep-merge `parameters` into the existing parameters.',
      ),
  }),
  // Set one nested parameter via JSON Pointer; cheaper than resending the
  // whole parameters object. min(2) because the shortest valid path is "/x".
  z.object({
    type: z.literal('setNodeParameter'),
    nodeName: z.string().describe('Name of the existing node to update.'),
    path: z
      .string()
      .min(2)
      .describe(
        'JSON Pointer (RFC 6901) path to the parameter to set, e.g. "/jsonSchema" or "/options/systemMessage". Must start with "/". Intermediate objects are created on demand. Array indices are NOT supported — to change a value inside an array, set the whole array. Use this instead of `updateNodeParameters` when you only need to set one nested key — the payload stays small regardless of the rest of the parameters object.',
      ),
    value: z
      .unknown()
      // z.unknown() alone would accept undefined; refine rejects it explicitly.
      .refine((v) => v !== undefined, { message: 'value is required' })
      .describe('Value to set at the path. Any defined JSON value.'),
  }),
  // Add a brand-new node; name collisions are rejected at apply time.
  z.object({
    type: z.literal('addNode'),
    node: z
      .object({
        name: z.string().describe('Unique node name. Must not collide with an existing node.'),
        type: z.string().describe('Fully qualified node type, e.g. "n8n-nodes-base.set".'),
        typeVersion: z.number(),
        parameters: z.record(z.string(), z.unknown()).optional(),
        position: positionSchema().optional(),
        credentials: credentialsSchema.optional(),
        disabled: z.boolean().optional(),
        notes: z.string().optional(),
        id: z.string().optional().describe('Optional node id. Generated if omitted.'),
      })
      .describe('The node to add to the workflow.'),
  }),
  // Remove a node and every connection touching it.
  z.object({
    type: z.literal('removeNode'),
    nodeName: z
      .string()
      .describe('Name of the node to remove. All inbound and outbound connections are removed.'),
  }),
  // Rename a node; connection references are rewritten at apply time.
  z.object({
    type: z.literal('renameNode'),
    oldName: z.string(),
    newName: z.string().describe('New unique node name.'),
  }),
  // Wire source -> target; indices/type default to main output 0 -> input 0.
  z.object({
    type: z.literal('addConnection'),
    source: z.string().describe('Name of the source node.'),
    target: z.string().describe('Name of the target node.'),
    sourceIndex: z
      .number()
      .int()
      .nonnegative()
      .optional()
      .describe('Source output index. Default 0.'),
    targetIndex: z
      .number()
      .int()
      .nonnegative()
      .optional()
      .describe('Target input index. Default 0.'),
    connectionType: z
      .string()
      .optional()
      .describe('Connection type, e.g. "main" or "ai_languageModel". Default "main".'),
  }),
  // Inverse of addConnection; fails at apply time if the edge does not exist.
  z.object({
    type: z.literal('removeConnection'),
    source: z.string(),
    target: z.string(),
    sourceIndex: z.number().int().nonnegative().optional(),
    targetIndex: z.number().int().nonnegative().optional(),
    connectionType: z.string().optional(),
  }),
  // Attach an existing credential (by id + name) under a node's credential key.
  z.object({
    type: z.literal('setNodeCredential'),
    nodeName: z.string(),
    credentialKey: z
      .string()
      .describe('Credential key on the node, e.g. "slackApi" or "httpHeaderAuth".'),
    credentialId: z.string(),
    credentialName: z.string(),
  }),
  // Move a node on the canvas.
  z.object({
    type: z.literal('setNodePosition'),
    nodeName: z.string(),
    position: positionSchema(),
  }),
  // Enable/disable a node.
  z.object({
    type: z.literal('setNodeDisabled'),
    nodeName: z.string(),
    disabled: z.boolean(),
  }),
  // Update workflow-level name/description; limits mirror the tool input schema.
  z.object({
    type: z.literal('setWorkflowMetadata'),
    name: z.string().max(128).optional(),
    description: z.string().max(255).optional(),
  }),
]);

/** One operation variant, inferred from the schema above. */
export type PartialUpdateOperation = z.infer<typeof partialUpdateOperationSchema>;
// Minimal subset of a workflow that the partial-update path reads and writes.
// Deliberately excludes fields (pinData, settings, …) that operations never touch.
interface WorkflowSlice {
  name: string;
  description?: string;
  nodes: INode[];
  connections: IConnections;
}

// Successful application: the mutated clone plus the names of nodes that were
// added (and not subsequently removed/renamed away) by this batch. Callers use
// `addedNodeNames` to scope credential auto-assignment to genuinely new nodes.
export interface ApplyOperationsSuccess {
  success: true;
  workflow: WorkflowSlice;
  addedNodeNames: string[];
}

// First failure encountered; `opIndex` identifies the offending operation so
// the agent can correct exactly that entry and retry.
export interface ApplyOperationsFailure {
  success: false;
  error: string;
  opIndex: number;
}

export type ApplyOperationsResult = ApplyOperationsSuccess | ApplyOperationsFailure;
/**
 * Deep-copy a workflow slice so callers can mutate the result freely without
 * touching the input (the basis of applyOperations' atomicity guarantee).
 */
const cloneWorkflow = (workflow: WorkflowSlice): WorkflowSlice => {
  const clonedNodes = workflow.nodes.map((node) => structuredClone(node));
  const clonedConnections = structuredClone(workflow.connections);
  return {
    name: workflow.name,
    description: workflow.description,
    nodes: clonedNodes,
    connections: clonedConnections,
  };
};
// True only for non-null, non-array objects (the shapes we can safely merge into).
const isPlainObject = (value: unknown): value is Record<string, unknown> => {
  if (typeof value !== 'object' || value === null) return false;
  return !Array.isArray(value);
};
// Recursively rebuild a value, dropping object keys that could enable
// prototype pollution (per isSafeObjectProperty). Arrays are sanitized
// element-by-element; primitives pass through untouched.
const sanitizeUnsafeKeys = (value: unknown): unknown => {
  if (Array.isArray(value)) {
    return value.map((item) => sanitizeUnsafeKeys(item));
  }
  if (!isPlainObject(value)) {
    return value;
  }
  const sanitized: Record<string, unknown> = {};
  for (const [key, entry] of Object.entries(value)) {
    if (isSafeObjectProperty(key)) {
      sanitized[key] = sanitizeUnsafeKeys(entry);
    }
  }
  return sanitized;
};
/**
 * Decode a JSON Pointer path (RFC 6901) into safe property segments.
 * Returns null if the path is malformed, empty, contains an empty segment,
 * or contains an unsafe segment. The leading "/" is required.
 * Array indices are not supported: numeric segments are treated as object keys,
 * and descent into an array (or any non-object) fails at apply time.
 */
const parseJsonPointer = (path: string): string[] | null => {
  if (!path.startsWith('/')) return null;
  const body = path.slice(1);
  if (body.length === 0) return null;
  const decoded: string[] = [];
  for (const rawSegment of body.split('/')) {
    // RFC 6901: every '~' must be followed by '0' or '1'; bare '~' or '~2' is malformed.
    if (/~(?:[^01]|$)/.test(rawSegment)) return null;
    // Unescape in the mandated order: '~1' -> '/', then '~0' -> '~'.
    const segment = rawSegment.replace(/~1/g, '/').replace(/~0/g, '~');
    if (segment.length === 0) return null;
    if (!isSafeObjectProperty(segment)) return null;
    decoded.push(segment);
  }
  return decoded;
};
/**
 * Set `value` at `segments` inside `root`, creating intermediate objects on demand.
 * Returns an error message if an intermediate segment exists but is not a plain object,
 * otherwise mutates `root` in place and returns null.
 */
const setAtPointer = (
  root: Record<string, unknown>,
  segments: string[],
  value: unknown,
): string | null => {
  let parent: Record<string, unknown> = root;
  const lastIndex = segments.length - 1;
  for (let depth = 0; depth < lastIndex; depth++) {
    const key = segments[depth];
    const existing = parent[key];
    if (isPlainObject(existing)) {
      parent = existing;
      continue;
    }
    if (existing !== undefined) {
      // Something non-object already lives here; refuse rather than clobber.
      return `cannot descend into non-object at '/${segments.slice(0, depth + 1).join('/')}'`;
    }
    const created: Record<string, unknown> = {};
    parent[key] = created;
    parent = created;
  }
  // Sanitize the payload so nested unsafe keys cannot be smuggled in.
  parent[segments[lastIndex]] = sanitizeUnsafeKeys(value);
  return null;
};
// Non-mutating deep merge: plain objects merge recursively, everything else
// (arrays included) is replaced by the sanitized source value. Unsafe source
// keys are skipped entirely.
const deepMerge = (
  target: Record<string, unknown>,
  source: Record<string, unknown>,
): Record<string, unknown> => {
  const merged: Record<string, unknown> = { ...target };
  for (const key of Object.keys(source)) {
    if (!isSafeObjectProperty(key)) continue;
    const incoming = source[key];
    const current = Object.prototype.hasOwnProperty.call(merged, key) ? merged[key] : undefined;
    merged[key] =
      isPlainObject(current) && isPlainObject(incoming)
        ? deepMerge(current, incoming)
        : sanitizeUnsafeKeys(incoming);
  }
  return merged;
};
/**
 * Drop every inbound and outbound connection that references `nodeName`,
 * pruning empty arrays/objects so the JSON shape stays clean.
 */
const removeConnectionsFor = (connections: IConnections, nodeName: string): void => {
  // Outbound: the node's own entry disappears wholesale.
  delete connections[nodeName];
  // Inbound: strip the node from every other source's target lists.
  for (const [sourceName, byType] of Object.entries(connections)) {
    for (const [connectionType, outputs] of Object.entries(byType)) {
      outputs.forEach((targets, index) => {
        if (targets) {
          outputs[index] = targets.filter((connection) => connection.node !== nodeName);
        }
      });
      // Drop the connection type only when no output slot has targets left.
      const hasAnyTarget = outputs.some((slot) => !!slot && slot.length > 0);
      if (!hasAnyTarget) {
        delete byType[connectionType];
      }
    }
    if (Object.keys(byType).length === 0) {
      delete connections[sourceName];
    }
  }
};
/**
 * Rename every reference to `oldName` (both as connection key and as target).
 */
const renameInConnections = (connections: IConnections, oldName: string, newName: string): void => {
  // Outbound side: move the node's own entry to the new key.
  const outgoing = connections[oldName];
  if (outgoing) {
    delete connections[oldName];
    connections[newName] = outgoing;
  }
  // Inbound side: rewrite every target reference pointing at the old name.
  for (const byType of Object.values(connections)) {
    for (const outputs of Object.values(byType)) {
      for (const targets of outputs) {
        if (!targets) continue;
        for (const connection of targets) {
          if (connection.node === oldName) {
            connection.node = newName;
          }
        }
      }
    }
  }
};
// Return the target list at connections[source][connectionType][sourceIndex],
// materializing every missing level on the way: the per-source map, the
// per-type output array (padded with nulls up to sourceIndex), and finally
// the slot itself. The returned array is live — callers push into it.
const ensureOutputSlot = (
  connections: IConnections,
  source: string,
  connectionType: string,
  sourceIndex: number,
): IConnection[] => {
  if (connections[source] == null) {
    connections[source] = {};
  }
  const byType = connections[source];
  if (byType[connectionType] == null) {
    byType[connectionType] = [];
  }
  const outputs = byType[connectionType];
  while (outputs.length <= sourceIndex) {
    outputs.push(null);
  }
  const existing = outputs[sourceIndex];
  if (existing) {
    return existing;
  }
  const fresh: IConnection[] = [];
  outputs[sourceIndex] = fresh;
  return fresh;
};
// After a removal, collapse now-empty structure: drop the connection type when
// every output slot is null/empty, then drop the source entry when it has no
// types left. Safe to call for sources that no longer exist.
const pruneConnectionShape = (
  connections: IConnections,
  source: string,
  connectionType: string,
): void => {
  const byType = connections[source];
  if (!byType) return;
  const outputs = byType[connectionType];
  if (outputs) {
    const hasAnyTarget = outputs.some((slot) => (slot?.length ?? 0) > 0);
    if (!hasAnyTarget) {
      delete byType[connectionType];
    }
  }
  if (Object.keys(byType).length === 0) {
    delete connections[source];
  }
};
// Build a failure result whose message is prefixed with the offending
// operation's index, so the agent can fix exactly that entry and retry.
const fail = (opIndex: number, message: string): ApplyOperationsFailure => {
  const error = `Operation ${opIndex} failed: ${message}`;
  return { success: false, error, opIndex };
};
/**
 * Apply a sequence of partial-update operations to a workflow slice atomically.
 * Returns the mutated clone on success, or the first failure with the offending op index.
 *
 * The function never mutates the input.
 */
export function applyOperations(
  input: WorkflowSlice,
  operations: PartialUpdateOperation[],
): ApplyOperationsResult {
  // Work on a deep clone: returning early on any failure leaves the caller's
  // slice untouched, which is what makes the batch atomic.
  const workflow = cloneWorkflow(input);
  // name -> node lookup; kept in sync with workflow.nodes by every case below.
  const nodeByName = new Map(workflow.nodes.map((n) => [n.name, n]));
  // Nodes added by this batch (and not later removed); surfaced to the caller
  // so credential auto-assignment can be scoped to new nodes only.
  const addedNodeNames = new Set<string>();
  for (let i = 0; i < operations.length; i++) {
    const op = operations[i];
    switch (op.type) {
      case 'updateNodeParameters': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        // Strip unsafe keys before merging so `parameters` cannot smuggle in
        // prototype-polluting properties.
        const sanitized = sanitizeUnsafeKeys(op.parameters) as Record<string, unknown>;
        const merged = op.replace
          ? sanitized
          : deepMerge((node.parameters ?? {}) as Record<string, unknown>, sanitized);
        node.parameters = merged as INodeParameters;
        break;
      }
      case 'setNodeParameter': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        const segments = parseJsonPointer(op.path);
        if (!segments) {
          return fail(i, `path '${op.path}' is invalid or contains unsafe segments`);
        }
        const params = (node.parameters ?? {}) as Record<string, unknown>;
        // setAtPointer mutates `params` in place; on success we write the
        // (possibly freshly created) object back onto the node.
        const setError = setAtPointer(params, segments, op.value);
        if (setError) return fail(i, setError);
        node.parameters = params as INodeParameters;
        break;
      }
      case 'addNode': {
        if (!isSafeObjectProperty(op.node.name)) {
          return fail(i, `node name '${op.node.name}' is not allowed`);
        }
        if (nodeByName.has(op.node.name)) {
          return fail(i, `a node named '${op.node.name}' already exists`);
        }
        const node: INode = {
          id: op.node.id ?? uuid(),
          name: op.node.name,
          type: op.node.type,
          typeVersion: op.node.typeVersion,
          position: op.node.position ?? [0, 0],
          parameters: (sanitizeUnsafeKeys(op.node.parameters ?? {}) ?? {}) as INodeParameters,
        };
        if (op.node.credentials) {
          // Credential keys become object properties, so they go through the
          // same safety check as node names.
          const credentialEntries: Array<[string, { id: string | null; name: string }]> = [];
          for (const [key, cred] of Object.entries(op.node.credentials)) {
            if (!isSafeObjectProperty(key)) {
              return fail(i, `credential key '${key}' is not allowed`);
            }
            credentialEntries.push([key, { id: cred.id ?? null, name: cred.name }]);
          }
          node.credentials = Object.fromEntries(credentialEntries);
        }
        if (op.node.disabled !== undefined) node.disabled = op.node.disabled;
        if (op.node.notes !== undefined) node.notes = op.node.notes;
        workflow.nodes.push(node);
        nodeByName.set(node.name, node);
        addedNodeNames.add(node.name);
        break;
      }
      case 'removeNode': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        workflow.nodes.splice(workflow.nodes.indexOf(node), 1);
        nodeByName.delete(op.nodeName);
        // Also drop every edge touching the node so no dangling references remain.
        removeConnectionsFor(workflow.connections, op.nodeName);
        // A node added earlier in this same batch no longer counts as "added".
        addedNodeNames.delete(op.nodeName);
        break;
      }
      case 'renameNode': {
        // No-op rename is allowed and skips all validation.
        if (op.oldName === op.newName) break;
        if (!isSafeObjectProperty(op.newName)) {
          return fail(i, `node name '${op.newName}' is not allowed`);
        }
        const node = nodeByName.get(op.oldName);
        if (!node) return fail(i, `node '${op.oldName}' not found`);
        if (nodeByName.has(op.newName)) {
          return fail(i, `a node named '${op.newName}' already exists`);
        }
        node.name = op.newName;
        nodeByName.delete(op.oldName);
        nodeByName.set(op.newName, node);
        renameInConnections(workflow.connections, op.oldName, op.newName);
        // Track the rename in the added-node set so auto-assignment still sees it.
        if (addedNodeNames.delete(op.oldName)) addedNodeNames.add(op.newName);
        break;
      }
      case 'addConnection': {
        if (!nodeByName.has(op.source)) {
          return fail(i, `source node '${op.source}' not found`);
        }
        if (!nodeByName.has(op.target)) {
          return fail(i, `target node '${op.target}' not found`);
        }
        const connectionType = (op.connectionType ??
          NodeConnectionTypes.Main) as NodeConnectionType;
        // Both become object keys in the connections map, so guard them.
        if (!isSafeObjectProperty(op.source) || !isSafeObjectProperty(connectionType)) {
          return fail(i, 'connection name is not allowed');
        }
        const sourceIndex = op.sourceIndex ?? 0;
        const targetIndex = op.targetIndex ?? 0;
        const slot = ensureOutputSlot(workflow.connections, op.source, connectionType, sourceIndex);
        // Adding an already-existing edge is a silent no-op (idempotent).
        const exists = slot.some(
          (c) => c.node === op.target && c.type === connectionType && c.index === targetIndex,
        );
        if (!exists) {
          slot.push({ node: op.target, type: connectionType, index: targetIndex });
        }
        break;
      }
      case 'removeConnection': {
        const connectionType = (op.connectionType ??
          NodeConnectionTypes.Main) as NodeConnectionType;
        const sourceIndex = op.sourceIndex ?? 0;
        const targetIndex = op.targetIndex ?? 0;
        const byType = workflow.connections[op.source];
        const outputs = byType?.[connectionType];
        const slot = outputs?.[sourceIndex];
        if (!slot) {
          return fail(i, `no '${connectionType}' connection from '${op.source}'`);
        }
        const filtered = slot.filter(
          (c) => !(c.node === op.target && c.type === connectionType && c.index === targetIndex),
        );
        // Unlike addConnection, removing a non-existent edge is an error so the
        // agent learns its model of the graph is stale.
        if (filtered.length === slot.length) {
          return fail(
            i,
            `connection from '${op.source}'[${sourceIndex}] to '${op.target}'[${targetIndex}] does not exist`,
          );
        }
        outputs[sourceIndex] = filtered;
        pruneConnectionShape(workflow.connections, op.source, connectionType);
        break;
      }
      case 'setNodeCredential': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        if (!isSafeObjectProperty(op.credentialKey)) {
          return fail(i, `credential key '${op.credentialKey}' is not allowed`);
        }
        node.credentials = {
          ...(node.credentials ?? {}),
          [op.credentialKey]: { id: op.credentialId, name: op.credentialName },
        };
        break;
      }
      case 'setNodePosition': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        node.position = op.position;
        break;
      }
      case 'setNodeDisabled': {
        const node = nodeByName.get(op.nodeName);
        if (!node) return fail(i, `node '${op.nodeName}' not found`);
        node.disabled = op.disabled;
        break;
      }
      case 'setWorkflowMetadata': {
        if (op.name !== undefined) workflow.name = op.name;
        if (op.description !== undefined) workflow.description = op.description;
        break;
      }
      default: {
        // Compile-time exhaustiveness: a new op variant without a case makes
        // `op satisfies never` fail to type-check.
        op satisfies never;
        return fail(i, 'unknown operation type');
      }
    }
  }
  return { success: true, workflow, addedNodeNames: [...addedNodeNames] };
}
/**
 * Pick only the fields the partial-update path needs from a workflow entity.
 * Keeps the surface explicit and avoids mutating the loaded entity.
 */
export function toWorkflowSlice(workflow: IWorkflowBase): WorkflowSlice {
  // IWorkflowBase has no `description` field; read it via a structural cast.
  const maybeDescribed = workflow as { description?: string };
  return {
    name: workflow.name ?? '',
    description: maybeDescribed.description,
    nodes: workflow.nodes,
    connections: workflow.connections,
  };
}