mirror of
https://github.com/n8n-io/n8n.git
synced 2026-05-13 00:20:27 +02:00
Compare commits
173 Commits
master
...
n8n@1.123.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6c8536ecf3 | ||
|
|
0d62a137eb | ||
|
|
db3b57b040 | ||
|
|
77eb53363d | ||
|
|
f8845745a6 | ||
|
|
6aaf436435 | ||
|
|
4af49f1d9e | ||
|
|
271af23ef3 | ||
|
|
d67c7144f9 | ||
|
|
55cad3babb | ||
|
|
880e40cde6 | ||
|
|
eb752033f2 | ||
|
|
31f55085e2 | ||
|
|
f4e941d394 | ||
|
|
1d6791179f | ||
|
|
f193f3133d | ||
|
|
5ce4b5d46c | ||
|
|
3d5cde8579 | ||
|
|
caa65d8e9b | ||
|
|
f07ca0d5a0 | ||
|
|
e67b44b0e0 | ||
|
|
90cb7227cf | ||
|
|
0e626768ed | ||
|
|
a6b3e819bb | ||
|
|
c4b79637b7 | ||
|
|
e7d95055d1 | ||
|
|
6bd24636ee | ||
|
|
808bc6a469 | ||
|
|
bf646761eb | ||
|
|
e633500f18 | ||
|
|
506cd5858e | ||
|
|
302186db5d | ||
|
|
27cbfbfc92 | ||
|
|
6e66377074 | ||
|
|
1fe07e97b0 | ||
|
|
ea12d022be | ||
|
|
22d02e5ad6 | ||
|
|
581a955e1f | ||
|
|
bf86a98163 | ||
|
|
f3c0b2c0cb | ||
|
|
48fdd9e947 | ||
|
|
a4d6a6d2f2 | ||
|
|
d9924ab38f | ||
|
|
88264ed350 | ||
|
|
a486719f15 | ||
|
|
13de8dfccc | ||
|
|
5504844633 | ||
|
|
7c8ff45509 | ||
|
|
86191fee1c | ||
|
|
fe6b0a8b36 | ||
|
|
ebf7721a59 | ||
|
|
4849d95b4b | ||
|
|
643e50524d | ||
|
|
d60437662b | ||
|
|
cd3bdce623 | ||
|
|
10aa98fceb | ||
|
|
9b1d4564e7 | ||
|
|
aef8c80491 | ||
|
|
b8accfedbb | ||
|
|
49d7e16028 | ||
|
|
0a4d66685a | ||
|
|
1479aab2d3 | ||
|
|
7762bef712 | ||
|
|
b30ed4c662 | ||
|
|
cebe811fe2 | ||
|
|
b045eb9b5f | ||
|
|
e0315d396f | ||
|
|
d1061826e9 | ||
|
|
300f429d9d | ||
|
|
9ccc1888f2 | ||
|
|
20c4ba9c1a | ||
|
|
9e417b9eaa | ||
|
|
db31c46f2a | ||
|
|
b5138c9c98 | ||
|
|
c39496eda9 | ||
|
|
dba9864e00 | ||
|
|
ad1023b57e | ||
|
|
4e5e9ff133 | ||
|
|
c6520e4e87 | ||
|
|
7f36e8e6d8 | ||
|
|
6a9eccbfb9 | ||
|
|
b00dcd9221 | ||
|
|
70c573c882 | ||
|
|
46dd25439c | ||
|
|
8d8681403c | ||
|
|
13ec09b159 | ||
|
|
911d3771ce | ||
|
|
740a518bf7 | ||
|
|
264db125ea | ||
|
|
7860896909 | ||
|
|
298c673bcb | ||
|
|
a8ddcea5f5 | ||
|
|
30383d8613 | ||
|
|
8ab4492e8c | ||
|
|
61fd8625d7 | ||
|
|
d05fc24fc3 | ||
|
|
7c81ee3152 | ||
|
|
e6737d24a8 | ||
|
|
afe3223255 | ||
|
|
019b462d2c | ||
|
|
465209a377 | ||
|
|
95173c5ecf | ||
|
|
25f644f7f3 | ||
|
|
512f50fa61 | ||
|
|
59ca0a2d9b | ||
|
|
1d5372ff93 | ||
|
|
a49067d6ba | ||
|
|
918bdcc286 | ||
|
|
b1b39bee74 | ||
|
|
3a3e4c6cc2 | ||
|
|
528ad6b982 | ||
|
|
27383c6d24 | ||
|
|
9262607282 | ||
|
|
7c2eb8cbdd | ||
|
|
148236390b | ||
|
|
45179a2c6f | ||
|
|
5dc3e4171b | ||
|
|
b1460c7cc1 | ||
|
|
dc3706ae55 | ||
|
|
b6059a120b | ||
|
|
f632578f8b | ||
|
|
8ea741a2e3 | ||
|
|
abd04226c1 | ||
|
|
57aad1e856 | ||
|
|
e0cfbdc48b | ||
|
|
f7cf22f92c | ||
|
|
5a3d556ce2 | ||
|
|
cbbd64f0eb | ||
|
|
6eb2bac670 | ||
|
|
e6313f6364 | ||
|
|
8a5d4d5746 | ||
|
|
aed5416484 | ||
|
|
97365caf25 | ||
|
|
1b5ccd8dee | ||
|
|
ae8097e60e | ||
|
|
00b2b3b463 | ||
|
|
4900d89650 | ||
|
|
7c66e72450 | ||
|
|
1448293d48 | ||
|
|
5102991310 | ||
|
|
e6fe97cb31 | ||
|
|
a03d4efa3b | ||
|
|
6f8dccf537 | ||
|
|
64bcbf450b | ||
|
|
8be76b3c5a | ||
|
|
7f53cbbc6c | ||
|
|
3730172e36 | ||
|
|
84fb27aa1d | ||
|
|
bf19e8d9a0 | ||
|
|
db20ecfe51 | ||
|
|
883c409be9 | ||
|
|
e924f07e62 | ||
|
|
14f70d3416 | ||
|
|
39f17f5fb3 | ||
|
|
177000bc89 | ||
|
|
3d2193278c | ||
|
|
158a3c35d3 | ||
|
|
d0e3d69c13 | ||
|
|
7bd2b8d617 | ||
|
|
f191116594 | ||
|
|
098fc046b4 | ||
|
|
173fa0868a | ||
|
|
56b43c8b73 | ||
|
|
4560a305dd | ||
|
|
9cdc03e049 | ||
|
|
158afd1d15 | ||
|
|
cc380559d3 | ||
|
|
d51b779ed3 | ||
|
|
7f27e06b22 | ||
|
|
0fe86822ef | ||
|
|
86d9ce1ca2 | ||
|
|
573a0a34aa | ||
|
|
7de2a7a33b |
|
|
@ -1 +0,0 @@
|
|||
../.claude/plugins/n8n/skills
|
||||
30
.bundlemonrc.json
Normal file
30
.bundlemonrc.json
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
{
|
||||
"baseDir": "packages/frontend/editor-ui/dist",
|
||||
"defaultCompression": "gzip",
|
||||
"reportOutput": [
|
||||
[
|
||||
"github",
|
||||
{
|
||||
"checkRun": true,
|
||||
"commitStatus": "off",
|
||||
"prComment": true
|
||||
}
|
||||
]
|
||||
],
|
||||
"files": [
|
||||
{
|
||||
"path": "*.wasm",
|
||||
"friendlyName": "WASM Dependencies"
|
||||
}
|
||||
],
|
||||
"groups": [
|
||||
{
|
||||
"groupName": "Editor UI - Total JS Size",
|
||||
"path": "**/*.js"
|
||||
},
|
||||
{
|
||||
"groupName": "Editor UI - Total CSS Size",
|
||||
"path": "**/*.css"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
# Claude Code Configuration
|
||||
|
||||
This directory contains shared Claude Code configuration for the n8n team.
|
||||
|
||||
All skills, agents, and commands live under the `n8n` plugin at
|
||||
`.claude/plugins/n8n/` for `n8n:` namespacing. See
|
||||
[plugin README](plugins/n8n/README.md) for full details.
|
||||
|
||||
## Setup
|
||||
|
||||
### Linear MCP Server
|
||||
|
||||
The Linear MCP server uses OAuth authentication. To connect:
|
||||
|
||||
1. Start Claude Code in this repository
|
||||
2. Run `/mcp` command
|
||||
3. Click the Linear authentication link in your browser
|
||||
4. Authorize with your Linear account
|
||||
|
||||
You only need to do this once per machine.
|
||||
|
||||
### Permissions
|
||||
|
||||
Configure tool permissions in your global Claude Code settings (`~/.claude/settings.json`), not in this repo. This allows each developer to customize their own approval preferences.
|
||||
|
||||
To auto-approve Linear MCP tools, add to your global settings:
|
||||
|
||||
```json
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"mcp__linear-server__*"
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Note:** For GitHub/git operations, we use `gh` CLI and `git` commands instead of GitHub MCP.
|
||||
|
||||
## Plugin
|
||||
|
||||
All skills, commands, and agents are auto-discovered from
|
||||
`.claude/plugins/n8n/`. They get the `n8n:` namespace prefix automatically
|
||||
(e.g. `n8n:create-pr`, `/n8n:plan`, `n8n:developer`).
|
||||
|
||||
See [plugin README](plugins/n8n/README.md) for structure and design decisions.
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
{
|
||||
"name": "n8n",
|
||||
"owner": {
|
||||
"name": "n8n"
|
||||
},
|
||||
"plugins": [
|
||||
{
|
||||
"name": "n8n",
|
||||
"source": "./"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
{
|
||||
"name": "n8n",
|
||||
"version": "0.2.0",
|
||||
"description": "n8n Claude Code plugin — shared skills, commands, and agents for n8n development"
|
||||
}
|
||||
|
|
@ -1,46 +0,0 @@
|
|||
# n8n Claude Code Plugin
|
||||
|
||||
Shared skills, commands, and agents for n8n development. All items are
|
||||
namespaced under `n8n:` to avoid collisions with personal or third-party
|
||||
plugins.
|
||||
|
||||
## Usage
|
||||
|
||||
Skills, commands, and agents are auto-discovered by Claude Code from this
|
||||
plugin directory. Everything gets the `n8n:` namespace prefix automatically.
|
||||
|
||||
| Type | Example | Invocation |
|
||||
|------|---------|------------|
|
||||
| Skill | `skills/create-pr/SKILL.md` | `n8n:create-pr` |
|
||||
| Command | `commands/plan.md` | `/n8n:plan PAY-XXX` |
|
||||
| Agent | `agents/developer.md` | `n8n:developer` |
|
||||
|
||||
## Plugin Structure
|
||||
|
||||
```
|
||||
.claude/plugins/n8n/
|
||||
├── .claude-plugin/
|
||||
│ ├── marketplace.json # Marketplace manifest
|
||||
│ └── plugin.json # Plugin identity
|
||||
├── agents/
|
||||
│ └── <name>.md # → n8n:<name> agent
|
||||
├── commands/
|
||||
│ └── <name>.md # → /n8n:<name> command
|
||||
├── skills/
|
||||
│ └── <name>/SKILL.md # → n8n:<name> skill
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## Design Decisions
|
||||
|
||||
### Why a plugin instead of standalone skills?
|
||||
|
||||
To get the `n8n:` namespace prefix, avoiding collisions with personal or
|
||||
third-party plugins. Claude Code only supports colon-namespaced items through
|
||||
the plugin system — standalone `.claude/skills/` entries cannot be namespaced.
|
||||
|
||||
### Known Issues
|
||||
|
||||
- Plugin skill namespacing requires omitting the `name` field from SKILL.md
|
||||
frontmatter due to a [Claude Code bug](https://github.com/anthropics/claude-code/issues/17271).
|
||||
The directory name is used as the skill identifier instead.
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
---
|
||||
name: developer
|
||||
description: Use this agent for any n8n development task - frontend (Vue 3), backend (Node.js/TypeScript), workflow engine, node creation, or full-stack features. The agent automatically applies n8n conventions and best practices. Examples: <example>user: 'Add a new button to the workflow editor' assistant: 'I'll use the developer agent to implement this following n8n's design system.'</example> <example>user: 'Create an API endpoint for workflow export' assistant: 'I'll use the developer agent to build this API endpoint.'</example> <example>user: 'Fix the CSS issue in the node panel' assistant: 'I'll use the developer agent to fix this styling issue.'</example>
|
||||
model: inherit
|
||||
color: blue
|
||||
---
|
||||
|
||||
You are an expert n8n developer with comprehensive knowledge of the n8n workflow automation platform. You handle both frontend (Vue 3 + Pinia + Design System) and backend (Node.js + TypeScript + Express + TypeORM) development.
|
||||
|
||||
## Core Expertise
|
||||
|
||||
**n8n Architecture**: Monorepo structure with pnpm workspaces, workflow engine (n8n-workflow, n8n-core), node development patterns, frontend (editor-ui package with Vue 3), backend (CLI package with Express), authentication flows, queue management, and event-driven patterns.
|
||||
|
||||
**Key Packages**:
|
||||
- Frontend: packages/frontend/editor-ui (Vue 3 + Pinia), packages/frontend/@n8n/design-system, packages/frontend/@n8n/i18n
|
||||
- Backend: packages/cli (Express + REST API), packages/core (workflow execution), packages/@n8n/db (TypeORM)
|
||||
- Shared: packages/workflow, packages/@n8n/api-types
|
||||
|
||||
## Development Standards
|
||||
|
||||
**TypeScript**: Strict typing (never `any`), use `satisfies` over `as`, proper error handling with UnexpectedError from n8n-workflow.
|
||||
|
||||
**Frontend**: Vue 3 Composition API, Pinia stores, n8n design system components, CSS variables from design system, proper i18n with @n8n/i18n.
|
||||
|
||||
**Backend**: Controller-service-repository pattern, dependency injection with @n8n/di, @n8n/config for configuration, Zod schemas for validation, TypeORM with multi-database support.
|
||||
|
||||
## Workflow
|
||||
|
||||
1. **Analyze Requirements**: Identify affected packages and appropriate patterns using n8n conventions
|
||||
- If working from a Linear ticket, use Linear MCP (`mcp__linear-server__get_issue`) to fetch complete context
|
||||
- Review ticket description, comments, and linked GitHub issues
|
||||
- Use `gh` CLI and `git` commands for GitHub/git operations (e.g., `gh pr view`, `git log`)
|
||||
2. **Plan Implementation**: Outline steps and dependencies
|
||||
3. **Follow Patterns**: Apply n8n architectural patterns consistently
|
||||
4. **Ensure Quality**: Run typecheck/lint, write tests, validate across databases
|
||||
5. **Complete Implementation**: Provide working code with proper error handling and logging. Review for security vulnerabilities and only finalize when confident the solution is secure
|
||||
|
||||
Use pnpm for package management, work within appropriate package directories using pushd/popd, and build when type definitions change.
|
||||
|
||||
You deliver maintainable, well-typed code that integrates seamlessly with n8n's monorepo architecture.
|
||||
|
|
@ -1,74 +0,0 @@
|
|||
---
|
||||
name: linear-issue-triager
|
||||
description: Use this agent proactively when a Linear issue is created, updated, or needs comprehensive analysis. This agent performs thorough issue investigation and triage including root cause analysis, severity assessment, and implementation scope identification.
|
||||
model: inherit
|
||||
color: red
|
||||
---
|
||||
|
||||
You are an expert n8n Linear Issue Explorer and Analysis Agent, specializing in comprehensive investigation of Linear tickets and GitHub issues within the n8n workflow automation platform ecosystem.
|
||||
|
||||
**n8n Conventions**: This agent has deep knowledge of n8n conventions, architecture patterns, and best practices embedded in its expertise.
|
||||
|
||||
Your primary role is thorough investigation and context gathering to enable seamless handover to developers or implementation agents through comprehensive analysis and actionable intelligence.
|
||||
|
||||
## Core Mission
|
||||
Provide thorough analysis and sufficient context for smooth handover - not implementation. Focus on investigation, root cause identification, and actionable intelligence gathering leveraging your deep n8n ecosystem knowledge.
|
||||
|
||||
## Investigation Capabilities
|
||||
|
||||
### 1. Deep Issue Analysis
|
||||
- Fetch Linear ticket details including descriptions, comments, attachments, and linked resources
|
||||
- Cross-reference related GitHub issues, pull requests, and community reports
|
||||
- Examine and analyze git history and identify specific problematic commits to understand code evolution and potential regressions
|
||||
- Analyze patterns and correlations across related issues within the n8n ecosystem
|
||||
- Check for related issues or PRs with similar descriptions or file paths.
|
||||
|
||||
### 2. Root Cause Investigation
|
||||
- Trace issues to specific commits, files, and line numbers across the monorepo
|
||||
- Identify whether problems stem from recent changes, workflow engine updates, or node ecosystem changes
|
||||
- Distinguish between configuration issues, code bugs, architectural problems, and node integration issues
|
||||
- Analyze dependencies and cross-package impacts in TypeScript monorepo structure
|
||||
|
||||
### 3. Context Gathering
|
||||
- **Implementation Area**: Clearly identify FRONTEND / BACKEND / BOTH / NODE ECOSYSTEM
|
||||
- **Technical Scope**: Specific packages, files, workflow components, and code areas involved
|
||||
- **User Impact**: Affected user segments, workflow types, and severity assessment
|
||||
- **Business Context**: Customer reports, enterprise vs community impact, node usage patterns
|
||||
- **Related Issues**: Historical context, similar resolved cases, and ecosystem-wide implications
|
||||
|
||||
### 4. Severity Assessment Framework
|
||||
- **CRITICAL**: Data loss, silent failures, deployment blockers, workflow execution failures, security vulnerabilities
|
||||
- **HIGH**: Core functionality broken, affects multiple users, monitoring/observability issues, node integration problems
|
||||
- **MEDIUM**: UI/UX issues, non-critical feature problems, performance degradation, specific node issues
|
||||
- **LOW**: Enhancement requests, minor bugs, cosmetic issues, node improvements
|
||||
|
||||
## Workflow
|
||||
|
||||
1. **Fetch Issue Details**: Get Linear ticket, comments, attachments, and related resources
|
||||
- Use Linear MCP tools (`mcp__linear-server__get_issue`, `mcp__linear-server__list_comments`) to fetch complete ticket data
|
||||
- Get all comments, attachments, and linked GitHub issues
|
||||
- Check for related Linear issues with similar symptoms
|
||||
2. **Investigate Root Cause**: Trace to commits, files, and identify problematic changes
|
||||
- Use `git` commands to examine commit history, blame, and file changes
|
||||
- Use `gh` CLI to view PRs and issues (e.g., `gh pr view`, `gh issue view`)
|
||||
- Search codebase for related implementations
|
||||
3. **Assess Severity**: Apply framework to determine priority level
|
||||
4. **Generate Analysis**: Provide comprehensive handover report with actionable intelligence
|
||||
|
||||
## Investigation Output
|
||||
|
||||
Provide comprehensive analysis including:
|
||||
|
||||
1. **Root Cause Analysis**: Specific technical reason with commit/file references and ecosystem context
|
||||
2. **Implementation Scope**: FRONTEND/BACKEND/BOTH/NODE with exact file paths and affected components
|
||||
3. **Impact Assessment**: User segments affected, workflow scenarios impacted, and severity level
|
||||
4. **Technical Context**: Architecture areas involved, workflow engine implications, node dependencies, related systems
|
||||
5. **Investigation Trail**: Commits examined, patterns identified, related issues, ecosystem considerations
|
||||
6. **Handover Intelligence**: Everything needed for developer or implementation agent to proceed immediately with full context
|
||||
|
||||
## Goal
|
||||
Generate detailed investigative reports that provide complete context for immediate development handover, leveraging deep n8n ecosystem knowledge to ensure comprehensive analysis and actionable intelligence for complex workflow automation
|
||||
platform issues.
|
||||
|
||||
## Important
|
||||
**DO NOT post triage results to Linear.** Only generate the analysis as output. The user will decide what to share with the Linear ticket.
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
---
|
||||
description: Plan n8n Linear ticket implementation
|
||||
argument-hint: [PAY-XXXX | DEV-XXXX | ENG-XXXX]
|
||||
allowed-tools: Task, Agent, Read, Glob, Grep, Write, Bash
|
||||
---
|
||||
|
||||
Launch a Plan agent (built-in) to research and design an implementation plan for Linear issue $ARGUMENTS.
|
||||
|
||||
The agent should:
|
||||
1. Fetch and analyze the Linear ticket using Linear MCP
|
||||
2. Identify affected packages and files
|
||||
3. Design implementation approach following n8n conventions
|
||||
4. Define testing strategy
|
||||
5. Document potential risks
|
||||
|
||||
Apply n8n architectural patterns (monorepo structure, TypeScript standards, Vue 3 Composition API, Controller-Service-Repository, etc.).
|
||||
|
||||
The agent should return the full plan as text (it cannot write files). After receiving the result, save it to `.claude/plans/<TICKET-ID>.md` (e.g. `.claude/plans/PAY-1234.md`). Create the directory if needed. This directory is gitignored.
|
||||
|
||||
The plan file should contain:
|
||||
- The ticket title and link
|
||||
- A summary of the ticket
|
||||
- The full implementation plan
|
||||
- Testing strategy
|
||||
- Risks and open questions
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
---
|
||||
description: Triage Linear issue with comprehensive analysis
|
||||
argument-hint: [PAY-XXXX | DEV-XXXX | ENG-XXXX]
|
||||
allowed-tools: Task
|
||||
---
|
||||
|
||||
Use the n8n:linear-issue-triager agent to triage Linear issue $ARGUMENTS.
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// Tracks n8n plugin skill usage by sending anonymized analytics.
|
||||
// Called as a PostToolUse hook for the Skill tool.
|
||||
// Receives JSON on stdin: { "tool_name": "Skill", "tool_input": { "skill": "n8n:foo", ... }, "tool_response": ... }
|
||||
|
||||
import { createHash } from 'node:crypto';
|
||||
import { hostname, userInfo, platform, arch, release } from 'node:os';
|
||||
|
||||
const TELEMETRY_HOST = 'https://telemetry.n8n.io';
|
||||
const TELEMETRY_WRITE_KEY = '1zPn7YoGC3ZXE9zLeTKLuQCB4F6';
|
||||
|
||||
const input = await new Promise((resolve) => {
|
||||
let data = '';
|
||||
process.stdin.on('data', (chunk) => (data += chunk));
|
||||
process.stdin.on('end', () => resolve(data));
|
||||
});
|
||||
|
||||
const { tool_input: toolInput } = JSON.parse(input);
|
||||
const skillName = toolInput?.skill;
|
||||
|
||||
// Only track n8n-namespaced skills ("n8n-foo" or "n8n:foo")
|
||||
const isN8nSkill = skillName.startsWith('n8n:') || skillName.startsWith('n8n-');
|
||||
if (!skillName || !isN8nSkill) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Generate anonymized user ID: SHA-256 of (username + hostname + OS + arch + release)
|
||||
const raw = `${userInfo().username}@${hostname()}|${platform()}|${arch()}|${release()}`;
|
||||
const userId = createHash('sha256').update(raw).digest('hex');
|
||||
|
||||
const payload = JSON.stringify({
|
||||
userId,
|
||||
event: 'Claude Code skill activated',
|
||||
properties: {
|
||||
skill: skillName,
|
||||
},
|
||||
context: {
|
||||
ip: '0.0.0.0',
|
||||
},
|
||||
});
|
||||
|
||||
// Send to telemetry HTTP Track API (fire-and-forget, never block the user)
|
||||
try {
|
||||
await fetch(`${TELEMETRY_HOST}/v1/track`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Basic ${Buffer.from(`${TELEMETRY_WRITE_KEY}:`).toString('base64')}`,
|
||||
},
|
||||
body: payload,
|
||||
});
|
||||
} catch {
|
||||
// Silently ignore network errors
|
||||
}
|
||||
|
|
@ -1,150 +0,0 @@
|
|||
---
|
||||
description: >-
|
||||
Checks if a community pull request is ready for human review. Verifies CLA
|
||||
signature, PR title format, description completeness, test coverage, and
|
||||
cubic-dev-ai issues. Use when given a PR number or branch name to review,
|
||||
or when the user says /community-pr-review, /pr-review, or asks to check if
|
||||
a PR is ready for review.
|
||||
allowed-tools: Bash(gh:*), Bash(git:*), Read, Glob, Grep
|
||||
---
|
||||
|
||||
# Community PR Review
|
||||
|
||||
Given a PR number or branch name, determine whether it is ready for human review.
|
||||
|
||||
## Steps
|
||||
|
||||
### 1. Resolve the PR
|
||||
|
||||
If given a branch name, find the PR number first:
|
||||
```bash
|
||||
gh pr view <branch> --repo n8n-io/n8n --json number --jq .number
|
||||
```
|
||||
|
||||
### 2. Fetch PR data
|
||||
|
||||
```bash
|
||||
gh pr view <number> --repo n8n-io/n8n \
|
||||
--json number,title,body,author,headRefName,headRefOid,files,isDraft,state
|
||||
```
|
||||
|
||||
Fetch in parallel:
|
||||
|
||||
```bash
|
||||
# CLA commit status (primary signal) — statuses are newest-first; use the first returned entry
|
||||
gh api --paginate "repos/n8n-io/n8n/commits/<headRefOid>/statuses" \
|
||||
--jq '[.[] | select(.context == "license/cla") | {state, description}] | first'
|
||||
|
||||
# CLAassistant issue comment (fallback when no commit status) — use the last returned entry
|
||||
gh api --paginate "repos/n8n-io/n8n/issues/<number>/comments" \
|
||||
--jq '[.[] | select(.user.login == "CLAassistant") | .body] | last'
|
||||
|
||||
# cubic-dev-ai PR review comments (streamed so results concatenate cleanly across pages)
|
||||
gh api --paginate "repos/n8n-io/n8n/pulls/<number>/comments" \
|
||||
--jq '.[] | select(.user.login == "cubic-dev-ai[bot]") | {body: .body, path: .path}'
|
||||
```
|
||||
|
||||
### 3. Run the five checks
|
||||
|
||||
#### A. CLA signed
|
||||
|
||||
Check the `license/cla` commit status first; fall back to the CLAassistant comment if no status exists.
|
||||
|
||||
**Commit status** (`context == "license/cla"`):
|
||||
- `state: "success"` → ✅ signed
|
||||
- `state: "failure"` or `state: "error"` → ❌ not signed
|
||||
- `state: "pending"` → ⏳ pending
|
||||
- Not present → fall back to comment
|
||||
|
||||
**CLAassistant issue comment** (fallback):
|
||||
- Body contains `"All committers have signed the CLA."` → ✅ signed
|
||||
- Body contains `"not signed"` or a link to sign → ❌ not signed
|
||||
- No comment → ❌ treat as not signed
|
||||
|
||||
#### B. PR title format
|
||||
|
||||
For all types except `revert`, the title must match:
|
||||
```
|
||||
^(feat|fix|perf|test|docs|refactor|build|ci|chore)(\([a-zA-Z0-9 ]+( Node)?\))?!?: [A-Z].+[^.]$
|
||||
```
|
||||
|
||||
For `revert` titles, the summary is the original commit header (which starts with a lowercase type), so capitalization is not enforced:
|
||||
```
|
||||
^revert(\([a-zA-Z0-9 ]+( Node)?\))?!?: .+[^.]$
|
||||
```
|
||||
|
||||
- Type must be one of: `feat fix perf test docs refactor build ci chore revert`
|
||||
- Scope is optional, in parentheses e.g. `(editor)` or `(Slack Node)`
|
||||
- Breaking changes: `!` before the colon
|
||||
- Summary: starts with capital letter (lowercase allowed for `revert:`), no trailing period
|
||||
- No Linear ticket IDs in the title (e.g. `N8N-1234`)
|
||||
|
||||
#### C. PR description completeness
|
||||
|
||||
1. **Summary** (`## Summary`) — must have non-empty content below the heading (not just the HTML comment).
|
||||
2. **Related tickets** (`## Related Linear tickets, Github issues, and Community forum posts`) — acceptable content: a URL (`http`), a GitHub closing keyword (`closes #N`, `fixes #N`, `resolves #N`, etc.), or empty. Only flag if the section heading is missing entirely.
|
||||
3. **Checklist** (`## Review / Merge checklist`) — all four items must be present. Unchecked checkboxes are expected for community PRs; do **not** flag them as missing.
|
||||
|
||||
#### D. Tests
|
||||
|
||||
Skip this check if the PR type (from the title) is `docs`, `ci`, `chore`, or `build`.
|
||||
|
||||
Otherwise:
|
||||
1. Identify source files changed: non-test files under `packages/` from the `files` list.
|
||||
2. If there are source file changes, check out the PR in a temporary worktree:
|
||||
|
||||
```bash
|
||||
git fetch origin pull/<number>/head:pr/<number>
|
||||
git worktree add /tmp/pr-<number>-review pr/<number>
|
||||
```
|
||||
|
||||
3. Read the changed source files from the worktree to understand whether the changes introduce logic that warrants tests (new functions, bug fixes, behaviour changes, data transformations). Pure config changes, type-only changes, and trivial renames do not require tests.
|
||||
4. Look for matching test files (`*.test.ts`, `*.spec.ts`, files inside `__tests__/`) among the changed files.
|
||||
5. **Always clean up the worktree**, even if a previous check failed:
|
||||
|
||||
```bash
|
||||
git worktree remove /tmp/pr-<number>-review --force
|
||||
git branch -D pr/<number>
|
||||
```
|
||||
|
||||
Report:
|
||||
- ✅ Tests present, or change does not require tests
|
||||
- ❌ Source logic changed but no test files found
|
||||
|
||||
#### E. cubic-dev-ai issues
|
||||
|
||||
Review the PR review comments fetched in step 2. `cubic-dev-ai[bot]` leaves comments for every issue it finds.
|
||||
|
||||
- No comments from `cubic-dev-ai[bot]`, or every comment explicitly states no issues were found → ✅
|
||||
- Any other comment → ❌ report the total count and priority breakdown (e.g. "3 issues: 1× P1, 1× P2, 1× P3")
|
||||
|
||||
### 4. Output
|
||||
|
||||
Always output valid JSON in this exact shape:
|
||||
|
||||
```json
|
||||
{
|
||||
"readyForReview": <true if all passing checks allow merge, false otherwise>,
|
||||
"messageForUser": "<Human-readable summary of what needs to change, written as if posted directly to the PR contributor. 'N/A' if nothing is needed.>",
|
||||
"checks": {
|
||||
"CLA": <true if signed, false if not signed or pending>,
|
||||
"Title": <true if title matches convention, false otherwise>,
|
||||
"Description": <true if all three template sections are complete, false otherwise>,
|
||||
"TestsNeeded": <true if the code changes require tests, false if not applicable>,
|
||||
"TestsIncluded": <true if test files are present in the PR, false otherwise>,
|
||||
"CubicIssues": <true if cubic-dev-ai raised issues, false if no issues>
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
`readyForReview` is `true` only when: `CLA`, `Title`, and `Description` are all `true`; `CubicIssues` is `false`; and either `TestsNeeded` is `false` or `TestsIncluded` is `true`.
|
||||
|
||||
`messageForUser` should be a short, friendly message directed at the contributor listing exactly what they need to address. If `readyForReview` is `true`, set it to `"N/A"`.
|
||||
|
||||
Output nothing other than the JSON block.
|
||||
|
||||
## Notes
|
||||
|
||||
- Draft PRs — report all findings but note the PR is a draft.
|
||||
- If the PR is already merged or closed, say so and skip the checks.
|
||||
- Always remove the worktree even if earlier checks failed.
|
||||
|
|
@ -1,330 +0,0 @@
|
|||
---
|
||||
name: n8n:content-design
|
||||
description: >
|
||||
Product content designer for UI copy. Use when writing, reviewing, or auditing
|
||||
user-facing text: button labels, error messages, tooltips, empty states, modal copy,
|
||||
placeholder text, confirmation dialogs, onboarding flows, or i18n strings.
|
||||
Also use when the user says /copy, /content, or /ux-copy.
|
||||
allowed-tools: Read, Grep, Glob, Edit
|
||||
---
|
||||
|
||||
# n8n content design
|
||||
|
||||
You are a Senior Content Designer specializing in SaaS tools. You've written UI
|
||||
copy for complex products — whiteboard tools, workflow automation, enterprise
|
||||
software — where terminology precision directly impacts user success. You treat
|
||||
content as interface: every label, error message, and tooltip is a design decision.
|
||||
|
||||
You think about what the user needs to know first. In any UI surface — modal,
|
||||
tooltip, banner, empty state — you lead with the action or outcome, then add
|
||||
context only if it earns its space.
|
||||
|
||||
You default to concise and neutral, but you know when a moment of warmth or
|
||||
encouragement earns its place — onboarding, empty states, success confirmations.
|
||||
You never force personality where clarity is the job.
|
||||
|
||||
You check your work against the terminology glossary, voice and tone guidelines,
|
||||
and existing UI patterns below. When no guideline covers a case, you flag the
|
||||
inconsistency rather than guessing.
|
||||
|
||||
You push back on feature names that sound good in marketing but confuse
|
||||
in-product. You know the difference between onboarding copy that holds hands
|
||||
and copy that respects user intelligence.
|
||||
|
||||
You write in short sentences. You cut filler words. You prefer "Save" over
|
||||
"Save changes" and "Delete project?" over "Are you sure you want to delete this
|
||||
project?" unless disambiguation is genuinely needed. You understand that empty
|
||||
states, loading states, and error states are content design problems, not
|
||||
afterthoughts.
|
||||
|
||||
---
|
||||
|
||||
## How to work
|
||||
|
||||
### Modes
|
||||
|
||||
When invoked, determine what the user needs:
|
||||
|
||||
1. **Write** — Draft new UI copy. Ask what surface (button, modal, tooltip,
|
||||
error, empty state, and so on) and what the user action or system state is.
|
||||
Deliver 1-3 options ranked by recommendation. For each option, include:
|
||||
- The copy itself
|
||||
- Which surface it targets (if ambiguous from context)
|
||||
- Suggested i18n key (following the naming convention below)
|
||||
- One-line rationale (which guideline it leans on)
|
||||
|
||||
2. **Review** — The user shares existing copy or points to a file. Check it
|
||||
against every rule below. Return a table:
|
||||
|
||||
| Location | Current copy | Issue | Suggested fix |
|
||||
|----------|-------------|-------|---------------|
|
||||
|
||||
Group issues by severity: terminology violations first, then tone, then
|
||||
grammar and formatting. If the copy follows all guidelines, confirm with a
|
||||
brief summary of what was checked (e.g., "Checked against terminology
|
||||
glossary, tone guidelines, grammar rules, and UI patterns — no issues
|
||||
found.").
|
||||
|
||||
3. **Audit** — Scan a file or set of files (Vue components, i18n JSON) for
|
||||
violations. Use Grep and Glob to find patterns, then report.
|
||||
|
||||
### Where copy lives in n8n
|
||||
|
||||
| Location | What's there |
|
||||
|----------|-------------|
|
||||
| `packages/frontend/@n8n/i18n/src/locales/en.json` | All UI strings (i18n keys) |
|
||||
| `packages/frontend/editor-ui/src/**/*.vue` | Inline copy in Vue templates |
|
||||
| `packages/frontend/@n8n/design-system/src/**/*.vue` | Design system component defaults |
|
||||
| `packages/nodes-base/nodes/**/*.ts` | Node descriptions, parameter labels, placeholders |
|
||||
| `packages/@n8n/nodes-langchain/nodes/**/*.ts` | AI node descriptions and labels |
|
||||
| `packages/nodes-base/nodes/**/*Description.ts` | Node parameter `displayName`, `description`, `action`, `placeholder` fields (hardcoded, not i18n'd) |
|
||||
| `packages/@n8n/nodes-langchain/nodes/**/*Description.ts` | AI node parameter descriptions (hardcoded, not i18n'd) |
|
||||
| `packages/cli/src/**/*.ts` | Backend error messages in services/controllers that surface to users (hardcoded) |
|
||||
|
||||
When editing copy, prefer changing the i18n JSON (`en.json`) over hardcoded
|
||||
strings in Vue files. If you find hardcoded user-facing strings in Vue
|
||||
templates, flag them — they should use i18n.
|
||||
|
||||
**i18n patterns** (in order of preference):
|
||||
|
||||
1. `i18n.baseText('key')` — preferred, most common
|
||||
2. `$t('key')` / `t('key')` — Vue i18n plugin shorthand
|
||||
3. `locale.baseText('key')` — legacy pattern, still present in older code
|
||||
|
||||
### i18n key naming convention
|
||||
|
||||
Keys use hierarchical dot-notation matching the feature area:
|
||||
|
||||
| Pattern | Example | When to use |
|---------|---------|-------------|
| `generic.*` | `generic.cancel`, `generic.save` | Universal labels used across many surfaces |
| `featureArea.subArea.element` | `settings.communityNodes.empty.title` | Feature-scoped copy |
| `_reusableBaseText.*` | `_reusableBaseText.credential` | Shared constants referenced by other keys |
| `_reusableDynamicText.*` | `_reusableDynamicText.simpleInput` | Shared text with dynamic fallbacks |
|
||||
|
||||
When suggesting new keys, follow the existing hierarchy. Browse nearby keys in
|
||||
`en.json` to match the nesting depth and naming style of the feature area.
|
||||
|
||||
---
|
||||
|
||||
## Content guidelines
|
||||
|
||||
### Language and grammar
|
||||
|
||||
**US English.** Always. No exceptions.
|
||||
- Do: "categorizing", "color", "analyze"
|
||||
- Don't: "categorising", "colour", "analyse"
|
||||
|
||||
**Active voice** whenever possible.
|
||||
- Do: "Administrators control user access to n8n Cloud."
|
||||
- Don't: "User access to n8n Cloud is controlled by administrators."
|
||||
|
||||
**Sentence case** for all titles, headings, menu items, labels, and buttons.
|
||||
Only capitalize the first word and proper nouns.
|
||||
- Do: "What triggers this workflow?", "Zoom in"
|
||||
- Don't: "What Triggers This Workflow?", "Zoom In"
|
||||
|
||||
**Periods.** A single sentence or fragment doesn't need one. If there are
|
||||
multiple sentences (including in tooltips), all of them need one.
|
||||
- "Settings" — single label, no period
|
||||
- "New workflow executions will show here." — multiple sentences need periods
|
||||
- Not: "Settings."
|
||||
|
||||
**Contractions.** Use them. They keep the tone conversational.
|
||||
- Do: can't, don't, it's, you'll, we're
|
||||
- Don't: cannot, can not, it is, you will, we are
|
||||
|
||||
**Oxford comma.** Always.
|
||||
- Do: "Connect apps, databases, and APIs."
|
||||
- Don't: "Connect apps, databases and APIs."
|
||||
|
||||
**Abbreviations.** Don't use internal abbreviations or jargon in
|
||||
customer-facing copy. Spell out unfamiliar terms on first use.
|
||||
- Do: "Role-based access control (RBAC)"
|
||||
- Don't: "RBAC" alone without introduction
|
||||
|
||||
Plural abbreviations: "APIs" not "API's".
|
||||
|
||||
**No Latin abbreviations.** Use plain alternatives.
|
||||
|
||||
| Don't use | Use instead |
|-----------|-------------|
| e.g. | for example, such as |
| i.e. | that is, in other words |
| etc. | and so on |
| vs / versus | compared to, or |
| via | through, with, using |
| n.b. | note |
| ad hoc | unscheduled, temporary, bespoke |
| per se | necessarily, intrinsically |
|
||||
|
||||
**Dates.** US format. Spell out months when space allows.
|
||||
- Do: "Apr 2", "February 14, 2025"
|
||||
- Don't: "2. Apr", "02/14/2025"
|
||||
|
||||
**Times.** 24-hour format with leading zero (technical audience).
|
||||
- Do: 13:34, 07:52
|
||||
- Don't: 1:34 PM, 7:52
|
||||
|
||||
**Numbers.** Commas for thousands, period for decimals.
|
||||
- Do: 23,456 and 346.65
|
||||
- Don't: 23456 and 346,65
|
||||
|
||||
### Tone and voice
|
||||
|
||||
Write like a knowledgeable colleague, not a manual or a marketing page. Be
|
||||
technical when precision matters, but default to plain language.
|
||||
|
||||
**Do:**
|
||||
- Be direct. Lead with the most important information.
|
||||
- Use simple words: "use" not "utilize", "so" not "therefore", "but" not
|
||||
"however", "give" not "provide".
|
||||
- Write short sentences. Break complex ideas into smaller pieces.
|
||||
- Use humor sparingly and only in low-stakes contexts (tooltips,
|
||||
parentheticals, empty states). Never in errors or warnings.
|
||||
- Address the user as "you". Refer to n8n as "n8n" or "we" depending on
|
||||
context.
|
||||
|
||||
**Don't:**
|
||||
- Use formal business language or marketing-speak.
|
||||
- Be overly enthusiastic or use filler words.
|
||||
- Use "please" excessively. One "please" is fine. Three in a paragraph is too
|
||||
many.
|
||||
- Anthropomorphize the product ("n8n thinks...", "n8n wants to...").
|
||||
|
||||
**Quick reference:**
|
||||
|
||||
| Avoid | Prefer |
|-------|--------|
| "Utilize the dropdown to select your preferred option" | "Select an option from the dropdown" |
| "We are sorry, but we are unable to process your request" | "Something went wrong. Try again in a few minutes." |
| "You have successfully created a new workflow!" | "Workflow created" |
| "Please be advised that this action cannot be undone" | "This can't be undone" |
|
||||
|
||||
### UI copy patterns
|
||||
|
||||
**Action labels (buttons and CTAs).** Start with a verb. Be specific.
|
||||
- Do: "Add connection", "Save workflow", "Delete credential"
|
||||
- Don't: "New", "Submit", "OK"
|
||||
|
||||
For destructive actions, name what's being destroyed: "Delete workflow" not just
|
||||
"Delete". Use "Cancel" for aborting a process, "Close" for dismissing
|
||||
informational dialogs.
|
||||
|
||||
**Error messages.** Structure: what happened + why (if known) + what to do next.
|
||||
Always include at least what happened and what to do.
|
||||
- Do: "Connection failed. Check that the API key is correct and try again."
|
||||
- Do: "Workflow can't be saved. The name field is required."
|
||||
- Don't: "Error 403"
|
||||
- Don't: "Something went wrong"
|
||||
- Don't: "Invalid input. Please try again."
|
||||
|
||||
Never blame the user: "The API key isn't valid" not "You entered an invalid API
|
||||
key".
|
||||
|
||||
**Empty states.** Guide, don't just inform. Explain what the area is for and
|
||||
give a clear next step.
|
||||
- Do: "No executions yet. Run this workflow to see results here."
|
||||
- Don't: "No data"
|
||||
|
||||
**Placeholder text.** Use realistic examples. Don't repeat the label.
|
||||
- Do: Label: "Webhook URL" / Placeholder: "https://example.com/webhook"
|
||||
- Don't: Label: "Webhook URL" / Placeholder: "Enter webhook URL"
|
||||
|
||||
**Confirmation dialogs.** State the consequence. Use the specific action as the
|
||||
confirm button label.
|
||||
- Title: "Delete workflow?"
|
||||
- Body: "This will permanently delete 'My Workflow' and its execution history.
|
||||
This can't be undone."
|
||||
- Buttons: "Delete workflow" / "Cancel"
|
||||
|
||||
**Tooltips.** One or two sentences. Add information the label alone can't
|
||||
convey — don't repeat the label.
|
||||
- Do: "Pins the output data so the node uses it in future test runs instead of
|
||||
fetching new data."
|
||||
- Don't: "Click to pin data"
|
||||
|
||||
**Truncation.** Use ellipsis (…). Show full text on hover/tooltip. Node and
|
||||
workflow names: truncate from end. File paths: truncate from middle.
|
||||
|
||||
### Terminology
|
||||
|
||||
Use these terms consistently. Don't capitalize unless starting a sentence.
|
||||
|
||||
| Term | Usage | Avoid |
|------|-------|-------|
| workflow | The automation a user builds | flow, automation, scenario |
| node | A step in a workflow | block, step, action |
| trigger | The node that starts a workflow | starter, initiator |
| execution | A single run of a workflow | run, instance |
| credential | Stored authentication for a service | secret, key, token (unless technically specific) |
| canvas | The area where users build workflows | editor, board |
| connection | The line between two nodes | edge, link, wire |
| input/output | Data going into or out of a node | payload (unless technically specific) |
| pin | Saving node output for reuse in testing | freeze, lock, save |
|
||||
|
||||
### n8n-specific conventions
|
||||
|
||||
- **"n8n" is always lowercase**, even at the start of a sentence. Never write
|
||||
"N8n" or "N8N".
|
||||
- **Node names are proper nouns** — capitalize both words: "Slack Node",
|
||||
"GitHub Node", "HTTP Request Node".
|
||||
- **Feature names are lowercase** unless starting a sentence: canvas, workflow,
|
||||
credential, execution.
|
||||
- **"n8n Cloud"** is the hosted product name — always capitalize "Cloud".
|
||||
|
||||
### Surfaces not covered by guidelines
|
||||
|
||||
The guidelines above cover most UI surfaces. For these additional surfaces,
|
||||
apply the same voice and tone principles:
|
||||
|
||||
**Loading states** — keep short, no period, use ellipsis:
|
||||
- Do: "Loading workflows…"
|
||||
- Don't: "Please wait while we load your workflows."
|
||||
|
||||
**Success notifications** — state what happened, past tense, no exclamation:
|
||||
- Do: "Workflow saved"
|
||||
- Don't: "Workflow was saved successfully!"
|
||||
|
||||
**Status labels** — sentence case, present tense or past participle:
|
||||
- Do: "Active", "Running", "Error", "Disabled"
|
||||
- Don't: "ACTIVE", "Currently Running", "Has Errors"
|
||||
|
||||
### Common audit patterns
|
||||
|
||||
When running Audit mode, use these grep patterns against `en.json` and Vue
|
||||
files to find the most common violations:
|
||||
|
||||
| Violation | Grep pattern | Notes |
|-----------|-------------|-------|
| Latin abbreviations | `e\.g\.\|i\.e\.\|etc\.\| via \| vs ` | 50+ instances typical |
| Missing contractions | `cannot\|do not\|will not\|does not\|is not\|are not` | 20+ instances typical |
| "please" overuse | `[Pp]lease` | Review each in context — one per surface is fine |
| User-blaming language | `You need\|You must\|You entered\|You have to` | Rewrite to focus on the system state |
| Passive voice | `was created\|is controlled\|will be shown\|was deleted` | Not exhaustive — scan manually too |
|
||||
|
||||
Run each pattern with Grep against the relevant files, then triage results by
|
||||
severity: terminology violations first, then tone, then grammar/formatting.
|
||||
|
||||
---
|
||||
|
||||
## Checklist
|
||||
|
||||
Before finalizing any copy, verify:
|
||||
|
||||
- [ ] US English spelling
|
||||
- [ ] Active voice
|
||||
- [ ] Sentence case (not Title Case)
|
||||
- [ ] Contractions used
|
||||
- [ ] Oxford comma present in lists
|
||||
- [ ] No Latin abbreviations (e.g., i.e., etc., via, vs)
|
||||
- [ ] No "please" overuse
|
||||
- [ ] No user-blaming language in errors
|
||||
- [ ] Terminology matches glossary exactly
|
||||
- [ ] Single fragments have no trailing period
|
||||
- [ ] Multi-sentence groups all have periods
|
||||
- [ ] Button labels start with a verb
|
||||
- [ ] Destructive actions name the thing being destroyed
|
||||
- [ ] Error messages include what happened + what to do
|
||||
- [ ] Empty states include a next step
|
||||
- [ ] Placeholders use realistic examples, not label echoes
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
---
|
||||
name: n8n:conventions
|
||||
description: Quick reference for n8n patterns. Full docs in /AGENTS.md
|
||||
---
|
||||
|
||||
# n8n Quick Reference
|
||||
|
||||
**📚 Full Documentation:**
|
||||
- **General:** `/AGENTS.md` - Architecture, commands, workflows
|
||||
- **Frontend:** `/packages/frontend/AGENTS.md` - CSS variables, timing
|
||||
|
||||
Use this skill when you need quick reminders on critical patterns.
|
||||
|
||||
## Critical Rules (Must Follow)
|
||||
|
||||
**TypeScript:**
|
||||
- Never `any` → use `unknown`
|
||||
- Prefer `satisfies` over `as` (except tests)
|
||||
- Shared types in `@n8n/api-types`
|
||||
|
||||
**Error Handling:**
|
||||
```typescript
|
||||
import { UnexpectedError } from 'n8n-workflow';
|
||||
throw new UnexpectedError('message', { extra: { context } });
|
||||
// DON'T use deprecated ApplicationError
|
||||
```
|
||||
|
||||
**Frontend:**
|
||||
- Vue 3 Composition API (`<script setup lang="ts">`)
|
||||
- CSS variables (never hardcode px) - see `/packages/frontend/AGENTS.md`
|
||||
- All text via i18n (`$t('key')`)
|
||||
- `data-testid` for E2E (single value, no spaces)
|
||||
|
||||
**Backend:**
|
||||
- Controller → Service → Repository
|
||||
- Dependency injection via `@n8n/di`
|
||||
- Config via `@n8n/config`
|
||||
- Zod schemas for validation
|
||||
|
||||
**Testing:**
|
||||
- Vitest (unit), Playwright (E2E)
|
||||
- Mock external dependencies
|
||||
- Work from package directory: `pushd packages/cli && pnpm test`
|
||||
|
||||
**Database:**
|
||||
- SQLite/PostgreSQL only (app DB)
|
||||
- Exception: DB nodes (MySQL Node, etc.) can use DB-specific features
|
||||
|
||||
**Commands:**
|
||||
```bash
|
||||
pnpm build > build.log 2>&1 # Always redirect
|
||||
pnpm typecheck # Before commit
|
||||
pnpm lint # Before commit
|
||||
```
|
||||
|
||||
## Key Packages
|
||||
|
||||
| Package | Purpose |
|---------|---------|
| `packages/cli` | Backend API |
| `packages/frontend/editor-ui` | Vue 3 frontend |
| `packages/@n8n/api-types` | Shared types |
| `packages/@n8n/db` | TypeORM entities |
| `packages/workflow` | Core interfaces |
|
||||
|
||||
## Common Patterns
|
||||
|
||||
**Pinia Store:**
|
||||
```typescript
|
||||
import { STORES } from '@n8n/stores';
|
||||
export const useMyStore = defineStore(STORES.MY_STORE, () => {
|
||||
const state = shallowRef([]);
|
||||
return { state };
|
||||
});
|
||||
```
|
||||
|
||||
**Vue Component:**
|
||||
```vue
|
||||
<script setup lang="ts">
|
||||
type Props = { title: string };
|
||||
const props = defineProps<Props>();
|
||||
</script>
|
||||
```
|
||||
|
||||
**Service:**
|
||||
```typescript
|
||||
import { Service } from '@n8n/di';
|
||||
import { Config } from '@n8n/config';
|
||||
|
||||
@Service()
|
||||
export class MyService {
|
||||
constructor(private readonly config: Config) {}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
📖 **Need more details?** Read `/AGENTS.md` and `/packages/frontend/AGENTS.md`
|
||||
|
|
@ -1,218 +0,0 @@
|
|||
---
|
||||
name: n8n:create-community-node-lint-rule
|
||||
description: >-
|
||||
Create new ESLint rules for the @n8n/eslint-plugin-community-nodes package.
|
||||
Use when adding a lint rule, creating a community node lint, or working on
|
||||
eslint-plugin-community-nodes. Guides rule implementation, tests, docs, and
|
||||
plugin registration.
|
||||
---
|
||||
|
||||
# Create Community Node Lint Rule
|
||||
|
||||
Guide for adding new ESLint rules to `packages/@n8n/eslint-plugin-community-nodes/`.
|
||||
|
||||
All paths below are relative to `packages/@n8n/eslint-plugin-community-nodes/`.
|
||||
|
||||
## Step 1: Understand the Rule
|
||||
|
||||
Before writing code, clarify:
|
||||
- **What** does the rule detect? (missing property, wrong pattern, bad value)
|
||||
- **Where** does it apply? (`.node.ts` files, credential classes, both)
|
||||
- **Severity**: `error` (must fix) or `warn` (should fix)?
|
||||
- **Fixable?** Can it be auto-fixed safely, or only suggest?
|
||||
- **Scope**: Both `recommended` configs, or exclude from `recommendedWithoutN8nCloudSupport`?
|
||||
|
||||
## Step 2: Implement the Rule
|
||||
|
||||
Create `src/rules/<rule-name>.ts`:
|
||||
|
||||
```typescript
|
||||
import { AST_NODE_TYPES } from '@typescript-eslint/utils';
|
||||
|
||||
import {
|
||||
isNodeTypeClass, // or isCredentialTypeClass
|
||||
findClassProperty,
|
||||
findObjectProperty,
|
||||
createRule,
|
||||
} from '../utils/index.js';
|
||||
|
||||
export const YourRuleNameRule = createRule({
|
||||
name: 'rule-name',
|
||||
meta: {
|
||||
type: 'problem', // or 'suggestion'
|
||||
docs: {
|
||||
description: 'One-line description of what the rule enforces',
|
||||
},
|
||||
messages: {
|
||||
messageId: 'Human-readable message. Use {{placeholder}} for dynamic data.',
|
||||
},
|
||||
fixable: 'code', // omit if not auto-fixable
|
||||
hasSuggestions: true, // omit if no suggestions
|
||||
schema: [], // add options schema if configurable
|
||||
},
|
||||
defaultOptions: [],
|
||||
create(context) {
|
||||
return {
|
||||
ClassDeclaration(node) {
|
||||
if (!isNodeTypeClass(node)) return;
|
||||
|
||||
const descriptionProperty = findClassProperty(node, 'description');
|
||||
if (!descriptionProperty) return;
|
||||
|
||||
const descriptionValue = descriptionProperty.value;
|
||||
if (descriptionValue?.type !== AST_NODE_TYPES.ObjectExpression) return;
|
||||
|
||||
// Rule logic here — use findObjectProperty(), getLiteralValue(), etc.
|
||||
|
||||
context.report({
|
||||
node: targetNode,
|
||||
messageId: 'messageId',
|
||||
data: { /* template vars */ },
|
||||
fix(fixer) {
|
||||
return fixer.replaceText(targetNode, 'replacement');
|
||||
},
|
||||
});
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Naming**: Export as `PascalCaseRule` (e.g. `MissingPairedItemRule`). The `name` field is kebab-case.
|
||||
|
||||
**Available AST helpers** — see [reference.md](reference.md) for the full catalog of `ast-utils` and `file-utils` exports.
|
||||
|
||||
## Step 3: Write Tests
|
||||
|
||||
Create `src/rules/<rule-name>.test.ts`:
|
||||
|
||||
```typescript
|
||||
import { RuleTester } from '@typescript-eslint/rule-tester';
|
||||
|
||||
import { YourRuleNameRule } from './rule-name.js';
|
||||
|
||||
const ruleTester = new RuleTester();
|
||||
|
||||
// Helper to generate test code — keeps test cases readable
|
||||
function createNodeCode(/* parameterize the varying parts */): string {
|
||||
return `
|
||||
import type { INodeType, INodeTypeDescription } from 'n8n-workflow';
|
||||
|
||||
export class TestNode implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Test Node',
|
||||
name: 'testNode',
|
||||
group: ['input'],
|
||||
version: 1,
|
||||
description: 'A test node',
|
||||
defaults: { name: 'Test Node' },
|
||||
inputs: [],
|
||||
outputs: [],
|
||||
properties: [],
|
||||
};
|
||||
}`;
|
||||
}
|
||||
|
||||
ruleTester.run('rule-name', YourRuleNameRule, {
|
||||
valid: [
|
||||
{ name: 'class that does not implement INodeType', code: '...' },
|
||||
{ name: 'node with correct pattern', code: createNodeCode(/* correct */) },
|
||||
],
|
||||
invalid: [
|
||||
{
|
||||
name: 'descriptive case name',
|
||||
code: createNodeCode(/* incorrect */),
|
||||
errors: [{ messageId: 'messageId', data: { /* expected template vars */ } }],
|
||||
output: createNodeCode(/* expected after fix */), // or `output: null` if no fix
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
**Test guidelines:**
|
||||
- Always test that non-INodeType classes are skipped (valid case)
|
||||
- Test both the error message and the fixed output for fixable rules
|
||||
- For rules with options, test each option combination
|
||||
- For rules using filesystem, mock with `vi.mock('../utils/file-utils.js')`
|
||||
- For suggestion-only rules, use `errors: [{ messageId, suggestions: [...] }]`
|
||||
|
||||
## Step 4: Register the Rule
|
||||
|
||||
### 4a. Add to `src/rules/index.ts`
|
||||
|
||||
```typescript
|
||||
import { YourRuleNameRule } from './rule-name.js';
|
||||
|
||||
// Add to the rules object:
|
||||
export const rules = {
|
||||
// ... existing rules
|
||||
'rule-name': YourRuleNameRule,
|
||||
} satisfies Record<string, AnyRuleModule>;
|
||||
```
|
||||
|
||||
### 4b. Add to `src/plugin.ts` configs
|
||||
|
||||
Add to **both** config objects (unless the rule depends on n8n cloud features):
|
||||
|
||||
```typescript
|
||||
'@n8n/community-nodes/rule-name': 'error', // or 'warn'
|
||||
```
|
||||
|
||||
- Use `error` for rules that catch bugs or required patterns
|
||||
- Use `warn` for style/convention rules (like `options-sorted-alphabetically`)
|
||||
- If the rule uses `no-restricted-globals` or `no-restricted-imports` patterns,
|
||||
only add to `recommended` (not `recommendedWithoutN8nCloudSupport`)
|
||||
|
||||
## Step 5: Write Documentation
|
||||
|
||||
Create `docs/rules/<rule-name>.md`:
|
||||
|
||||
```markdown
|
||||
# Description of what the rule does (`@n8n/community-nodes/rule-name`)
|
||||
|
||||
<!-- end auto-generated rule header -->
|
||||
|
||||
## Rule Details
|
||||
|
||||
Explain why this rule exists and what problem it prevents.
|
||||
|
||||
## Examples
|
||||
|
||||
### Incorrect
|
||||
|
||||
\`\`\`typescript
|
||||
// code that triggers the rule
|
||||
\`\`\`
|
||||
|
||||
### Correct
|
||||
|
||||
\`\`\`typescript
|
||||
// code that passes the rule
|
||||
\`\`\`
|
||||
```
|
||||
|
||||
The header above `<!-- end auto-generated rule header -->` will be regenerated by `pnpm build:docs`. Write a reasonable first version — it gets overwritten.
|
||||
|
||||
## Step 6: Verify
|
||||
|
||||
Run from `packages/@n8n/eslint-plugin-community-nodes/`:
|
||||
|
||||
```bash
|
||||
pushd packages/@n8n/eslint-plugin-community-nodes
|
||||
pnpm test <rule-name>.test.ts # tests pass
|
||||
pnpm typecheck # types are clean
|
||||
pnpm build # compiles
|
||||
pnpm build:docs # regenerates doc headers and README table
|
||||
pnpm lint:docs # docs match schema
|
||||
popd
|
||||
```
|
||||
|
||||
## Checklist
|
||||
|
||||
- [ ] Rule file: `src/rules/<rule-name>.ts`
|
||||
- [ ] Test file: `src/rules/<rule-name>.test.ts`
|
||||
- [ ] Registered in `src/rules/index.ts`
|
||||
- [ ] Added to configs in `src/plugin.ts`
|
||||
- [ ] Doc file: `docs/rules/<rule-name>.md`
|
||||
- [ ] README table updated via `pnpm build:docs`
|
||||
- [ ] All verification commands pass
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
# AST & File Utilities Reference
|
||||
|
||||
Helpers available from `../utils/index.js`. Use these instead of writing custom AST traversal.
|
||||
|
||||
## ast-utils.ts
|
||||
|
||||
### Class/Interface detection
|
||||
|
||||
| Function | Returns | Use when |
|----------|---------|----------|
| `isNodeTypeClass(node)` | `boolean` | Check if class implements `INodeType` or extends `Node` |
| `isCredentialTypeClass(node)` | `boolean` | Check if class implements `ICredentialType` |
|
||||
|
||||
### Property finding
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `findClassProperty(node, name)` | `PropertyDefinition \| null` | Find a property on a class (e.g. `description`, `icon`) |
|
||||
| `findObjectProperty(obj, name)` | `Property \| null` | Find a property in an object literal (Identifier key) |
|
||||
| `findJsonProperty(obj, name)` | `Property \| null` | Find a property with a Literal key (JSON-style `"key"`) |
|
||||
| `findArrayLiteralProperty(obj, name)` | `Property \| null` | Find a property whose value is an ArrayExpression |
|
||||
|
||||
### Value extraction
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `getLiteralValue(node)` | `string \| boolean \| number \| null` | Extract primitive from a Literal node |
|
||||
| `getStringLiteralValue(node)` | `string \| null` | Extract string specifically |
|
||||
| `getBooleanLiteralValue(node)` | `boolean \| null` | Extract boolean specifically |
|
||||
| `getModulePath(node)` | `string \| null` | Get import path from string literal or template literal |
|
||||
|
||||
### Array operations
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `hasArrayLiteralValue(arr, value)` | `boolean` | Check if array contains a specific string literal |
|
||||
| `extractCredentialInfoFromArray(element)` | `{ name, testedBy } \| null` | Parse credential object from array element |
|
||||
| `extractCredentialNameFromArray(element)` | `string \| null` | Get just the credential name from array element |
|
||||
|
||||
### Method matching
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `isThisHelpersAccess(node)` | `boolean` | Match `this.helpers` member expression |
|
||||
| `isThisMethodCall(node, method)` | `boolean` | Match `this.methodName(...)` calls |
|
||||
| `isThisHelpersMethodCall(node, method)` | `boolean` | Match `this.helpers.methodName(...)` calls |
|
||||
|
||||
### Similarity
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `findSimilarStrings(target, candidates, maxDistance?)` | `string[]` | Suggest similar names (Levenshtein distance) |
|
||||
|
||||
## file-utils.ts
|
||||
|
||||
### Path operations
|
||||
|
||||
| Function | Use when |
|
||||
|----------|----------|
|
||||
| `isContainedWithin(child, parent)` | Check path is within a directory |
|
||||
| `safeJoinPath(base, ...parts)` | Join paths with traversal prevention |
|
||||
|
||||
### Package.json
|
||||
|
||||
| Function | Returns | Use when |
|
||||
|----------|---------|----------|
|
||||
| `findPackageJson(startDir)` | `string \| null` | Walk up to find nearest package.json |
|
||||
| `readPackageJsonN8n(startDir)` | `N8nPackageJson \| null` | Parse n8n config section |
|
||||
| `readPackageJsonCredentials(startDir)` | `Set<string>` | Get credential names from package.json |
|
||||
| `readPackageJsonNodes(startDir)` | `string[]` | Get resolved node file paths |
|
||||
|
||||
### File system
|
||||
|
||||
| Function | Use when |
|
||||
|----------|----------|
|
||||
| `validateIconPath(filePath, iconValue)` | Check icon file exists and is SVG |
|
||||
| `extractCredentialNameFromFile(filePath)` | Parse credential class name from file |
|
||||
| `fileExistsWithCaseSync(filePath)` | Case-sensitive existence check |
|
||||
| `findSimilarSvgFiles(dir, name)` | Suggest similar SVG filenames |
|
||||
|
||||
### Credential verification
|
||||
|
||||
| Function | Use when |
|
||||
|----------|----------|
|
||||
| `areAllCredentialUsagesTestedByNodes(startDir)` | Check all credentials have testedBy |
|
||||
|
|
@ -1,361 +0,0 @@
|
|||
---
|
||||
name: n8n:create-issue
|
||||
description: Create Linear tickets or GitHub issues following n8n conventions. Use when the user asks to create a ticket, file a bug, open an issue, or says /create-issue.
|
||||
argument-hint: "[linear|github] <description of the issue>"
|
||||
compatibility:
|
||||
requires:
|
||||
- mcp: linear
|
||||
description: Required for creating Linear tickets
|
||||
- cli: gh
|
||||
description: Required for creating GitHub issues. Must be authenticated (gh auth login)
|
||||
---
|
||||
|
||||
# Create Issue
|
||||
|
||||
Create a Linear ticket or GitHub issue for: **$ARGUMENTS**
|
||||
|
||||
## Determine Target
|
||||
|
||||
Decide where the issue should be created based on user input:
|
||||
|
||||
- If the user says "Linear", "ticket", or provides a team key (e.g., AI, NODE, N8N) → **Linear**
|
||||
- If the user says "GitHub", "GH issue", or "open source" → **GitHub**
|
||||
- If ambiguous, **ask the user** which platform they want
|
||||
|
||||
---
|
||||
|
||||
## Linear Tickets
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Verify the Linear MCP is connected before proceeding.
|
||||
|
||||
### Style Guide
|
||||
|
||||
#### Title
|
||||
|
||||
- **Sentence case** — capitalize only the first word (e.g., "Add webhook verification to Trello trigger")
|
||||
- **Descriptive** — a reader should understand the scope without opening the ticket
|
||||
- **5–15 words** — long enough to be specific, short enough to scan
|
||||
- **Imperative mood for features/enhancements** — "Add ...", "Support ...", "Improve ..."
|
||||
- **Bug titles** — prefix with `Bug -` followed by a description of the symptom (e.g., "Bug - Pin data not updating after workflow edit")
|
||||
- **No ticket IDs in titles** — the identifier (AI-1234) is assigned automatically
|
||||
- **No trailing punctuation**
|
||||
|
||||
#### Description
|
||||
|
||||
Structure the description using markdown headers. Use the appropriate template:
|
||||
|
||||
**For bugs:**
|
||||
|
||||
```markdown
|
||||
## Description
|
||||
[Clear explanation of the problem]
|
||||
|
||||
## Expected
|
||||
[What should happen]
|
||||
|
||||
## Actual
|
||||
[What happens instead]
|
||||
|
||||
## Attachments
|
||||
[Screenshots, videos, or screen recordings that illustrate the problem]
|
||||
|
||||
## Steps to reproduce
|
||||
1. [Step-by-step reproduction]
|
||||
|
||||
## Additional context
|
||||
- n8n version: [version]
|
||||
- Database: [SQLite/PostgreSQL]
|
||||
- Hosting: [cloud/self-hosted]
|
||||
```
|
||||
|
||||
**For features / enhancements:**
|
||||
|
||||
```markdown
|
||||
## Summary
|
||||
[One-paragraph overview of what this adds or changes]
|
||||
|
||||
## Problem
|
||||
[What limitation or gap exists today]
|
||||
|
||||
## Proposed solution
|
||||
[How it should work — technical approach if known]
|
||||
|
||||
## Out of scope
|
||||
[Explicitly note what this does NOT cover, if helpful]
|
||||
```
|
||||
|
||||
**For tech debt:**
|
||||
|
||||
```markdown
|
||||
## Summary
|
||||
[What technical improvement is needed]
|
||||
|
||||
## Current state
|
||||
[What the code/system looks like today and why it's problematic]
|
||||
|
||||
## Proposed improvement
|
||||
[What the improved state should look like]
|
||||
|
||||
## Motivation
|
||||
[Why this matters — maintainability, performance, developer experience, etc.]
|
||||
|
||||
## Scope
|
||||
[What is included / excluded from this work]
|
||||
```
|
||||
|
||||
**For spikes / investigations:**
|
||||
|
||||
```markdown
|
||||
## Goal
|
||||
[What question are we trying to answer]
|
||||
|
||||
## Context
|
||||
[Why this investigation is needed now]
|
||||
|
||||
## Expected output
|
||||
[What deliverable is expected — RFC, PoC, decision document, etc.]
|
||||
```
|
||||
|
||||
#### Attachments (Screenshots / Videos)
|
||||
|
||||
If the user provides screenshots, videos, or screen recordings:
|
||||
|
||||
- **URLs** — embed directly in the description using markdown image syntax (`![description](url)`)
|
||||
- **File paths** — if the user provides a local file path, ask them to upload it to a hosting service (e.g., GitHub, Imgur) or use `mcp__linear-server__create_attachment` to attach it to the Linear ticket after creation
|
||||
- **Pasted images in conversation** — describe what the image shows in the ticket description and note that a screenshot was provided. You cannot upload binary data directly.
|
||||
|
||||
Always mention in the description when visual evidence was provided, even if it cannot be directly embedded.
|
||||
|
||||
#### Priority
|
||||
|
||||
| Value | Level | When to use |
|
||||
|-------|----------|-------------|
|
||||
| 4 | Low | Nice-to-have, no user impact |
|
||||
| 3 | Normal | Default — standard planned work |
|
||||
| 2 | High | Blocks other work or affects users significantly |
|
||||
| 1 | Urgent | Production-breaking, security vulnerability, data loss |
|
||||
| 0 | None | Not yet assessed |
|
||||
|
||||
**Guardrails:**
|
||||
- **Default to Normal (3)** unless the user explicitly states otherwise
|
||||
- **Never set Urgent (1)** unless the user explicitly says "urgent", "P0", "production down", or "security vulnerability"
|
||||
- **Never set None (0)** — always make a priority assessment. If unsure, use Normal (3)
|
||||
|
||||
#### Status
|
||||
|
||||
**Guardrails:**
|
||||
- **Never create issues in Triage status** — Triage is for externally-reported issues that enter through automated pipelines (GitHub sync, support escalation). Agent-created tickets have known context and should skip triage
|
||||
- **Default to Backlog** — use this when the issue is acknowledged but not yet planned for a sprint
|
||||
- **Use Todo** only when the user indicates the work is planned for the current cycle or should be picked up soon
|
||||
- **Never set In Progress, Review, or Done** at creation time
|
||||
|
||||
#### Team
|
||||
|
||||
- **Try to fetch up-to-date team areas of responsibility from Notion** using `mcp__notion__notion-search` (search for "areas of responsibility" or similar). Use the fetched data to determine the best team for the issue.
|
||||
- **If Notion MCP is unavailable or the lookup fails**, fall back to these common teams: `Engineering` (N8N), `AI`, `NODES`, `Identity & Access` (IAM), `Catalysts` (CAT), `Lifecycle & Governance` (LIGO), `Cloud Platform`, `Docs` (DOC)
|
||||
- **Always ask the user which team** if not obvious from context or the Notion lookup
|
||||
- If the issue is node-specific, it likely belongs to `NODES`
|
||||
- If it involves AI/LangChain nodes, it likely belongs to `AI`
|
||||
|
||||
#### Labels
|
||||
|
||||
Apply labels from these groups as appropriate:
|
||||
|
||||
**Type (pick one):**
|
||||
- `bug` — something is broken
|
||||
- `feature` — net-new capability
|
||||
- `enhancement` — improvement to existing functionality
|
||||
- `tech debt` — internal quality improvement
|
||||
- `spike` — time-boxed investigation
|
||||
- `doc` — documentation-only change
|
||||
|
||||
**Area (pick if applicable):**
|
||||
- `frontend`, `backend`, `performance`, `testing`, `infra`, `DX`, `Security-Team`
|
||||
|
||||
**Source (pick if applicable):**
|
||||
- `Internal` — created by team members
|
||||
- `GitHub` — originated from a GitHub issue
|
||||
- `Sentry` — originated from error monitoring
|
||||
- `Zammad` — originated from support
|
||||
|
||||
**Bucket (pick if applicable):**
|
||||
- Use the relevant feature-area bucket (e.g., `Credentials`, `Canvas/Node`, `RBAC`, `LangChain nodes`, `Form Trigger`, etc.)
|
||||
|
||||
**Guardrails:**
|
||||
- **Always apply a type label** — every ticket needs at least a type
|
||||
- **Do not apply triage-state labels** (`Triage: Pending`, `Triage: Complete`, etc.) — these are managed by triage automation
|
||||
- **Do not apply release labels** (`n8n@1.36.0`, etc.) — these are managed by release automation
|
||||
- **Do not apply `docs-automation` labels** — these are managed by docs automation
|
||||
|
||||
#### Estimates
|
||||
|
||||
Only set an estimate if the user provides one or explicitly asks for one. Use t-shirt sizes:
|
||||
|
||||
| Size | Value | Approximate effort |
|
||||
|------|-------|--------------------|
|
||||
| XS | 1 | ≤ 1 hour |
|
||||
| S | 2 | ≤ 1 day |
|
||||
| M | 3 | 2–3 days |
|
||||
| L | 4 | 3–5 days |
|
||||
| XL | 5 | ≥ 6 days |
|
||||
|
||||
### Creating the Ticket
|
||||
|
||||
1. **Gather required fields** — if any are missing, ask the user:
|
||||
- Title
|
||||
- Team
|
||||
- Description (draft one from the user's input using the templates above)
|
||||
|
||||
2. **Present a preview** before creating — show the user:
|
||||
- Title
|
||||
- Team
|
||||
- Status
|
||||
- Priority
|
||||
- Labels
|
||||
- Description (abbreviated if long)
|
||||
|
||||
3. **Wait for user confirmation** — do not create until the user approves
|
||||
|
||||
4. **Create the ticket** using `mcp__linear-server__save_issue`:
|
||||
```
|
||||
title: <title>
|
||||
team: <team name>
|
||||
description: <markdown description>
|
||||
priority: <priority number>
|
||||
state: <status name>
|
||||
labels: [<label names>]
|
||||
```
|
||||
|
||||
5. **Report back** with the issue identifier and URL
|
||||
|
||||
### Things to Never Do (Linear)
|
||||
|
||||
- Never create issues in **Triage** status
|
||||
- Never set **Urgent** priority without explicit user instruction
|
||||
- Never apply **triage-state**, **release**, or **docs-automation** labels
|
||||
- Never set **assignee** unless the user explicitly asks
|
||||
- Never set a **cycle** or **milestone** unless the user explicitly asks
|
||||
- Never create **duplicate issues** — if the user describes something that sounds like it may exist, search first with `mcp__linear-server__list_issues`
|
||||
|
||||
---
|
||||
|
||||
## GitHub Issues
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Verify `gh` CLI is authenticated: `gh auth status`
|
||||
|
||||
### Important Context
|
||||
|
||||
The n8n GitHub issue tracker (`n8n-io/n8n`) is **bug-only**. Feature requests and questions are redirected to the [community forum](https://community.n8n.io). Blank issues are disabled — the bug template must be used.
|
||||
|
||||
### Style Guide
|
||||
|
||||
#### Title
|
||||
|
||||
- **Sentence case** — same as Linear
|
||||
- **Descriptive of the symptom** — what is broken, not what you want
|
||||
- **No prefixes required** — do not add "Bug:" or "Bug Report:" (the template handles categorization)
|
||||
- **No trailing punctuation**
|
||||
|
||||
#### Body
|
||||
|
||||
GitHub issues **must** follow the bug report template structure:
|
||||
|
||||
```markdown
|
||||
### Bug Description
|
||||
|
||||
[Clear explanation of the bug]
|
||||
|
||||
### Steps to Reproduce
|
||||
|
||||
1. [Step 1]
|
||||
2. [Step 2]
|
||||
3. [Step 3]
|
||||
|
||||
### Expected Behavior
|
||||
|
||||
[What should happen]
|
||||
|
||||
### Debug Info
|
||||
|
||||
[If available — output from Help > About n8n > Copy debug information]
|
||||
|
||||
### Operating System
|
||||
|
||||
[e.g., macOS 14.2, Ubuntu 22.04]
|
||||
|
||||
### n8n Version
|
||||
|
||||
[e.g., 1.72.1]
|
||||
|
||||
### Node.js Version
|
||||
|
||||
[e.g., 20.11.0]
|
||||
|
||||
### Database
|
||||
|
||||
SQLite / PostgreSQL
|
||||
|
||||
### Execution Mode
|
||||
|
||||
main / queue
|
||||
|
||||
### Hosting
|
||||
|
||||
n8n cloud / self hosted
|
||||
```
|
||||
|
||||
**Guardrails:**
|
||||
- **Always include reproduction steps** — issues without them get closed as `closed:incomplete-template`
|
||||
- **Include debug info if available** — this is critical for triage
|
||||
- **Never file feature requests as GitHub issues** — redirect the user to the community forum or suggest creating a Linear ticket instead
|
||||
|
||||
#### Labels
|
||||
|
||||
Do **not** manually apply labels when creating GitHub issues. The triage automation handles labeling:
|
||||
- `triage:pending` is auto-applied
|
||||
- `status:in-linear` is auto-applied when synced
|
||||
|
||||
### Creating the Issue
|
||||
|
||||
1. **Verify it's a bug** — if the user describes a feature request, inform them that GitHub issues are bug-only and suggest alternatives (Linear ticket or community forum)
|
||||
|
||||
2. **Draft the issue** using the template above, filling in fields from the user's input
|
||||
|
||||
3. **Present a preview** before creating — show the user:
|
||||
- Title
|
||||
- Body (abbreviated if long)
|
||||
- Repository (default: `n8n-io/n8n`)
|
||||
|
||||
4. **Wait for user confirmation**
|
||||
|
||||
5. **Create the issue** using `gh`:
|
||||
```bash
|
||||
gh issue create --repo n8n-io/n8n --title "<title>" --body "$(cat <<'EOF'
|
||||
<body content>
|
||||
EOF
|
||||
)"
|
||||
```
|
||||
|
||||
6. **Report back** with the issue number and URL
|
||||
|
||||
### Things to Never Do (GitHub)
|
||||
|
||||
- Never file **feature requests** as GitHub issues
|
||||
- Never create issues **without reproduction steps**
|
||||
- Never manually apply **labels** — let automation handle it
|
||||
- Never create issues in **repositories other than n8n-io/n8n** unless the user explicitly specifies
|
||||
|
||||
---
|
||||
|
||||
## Cross-Linking
|
||||
|
||||
When both a Linear ticket and GitHub issue exist for the same problem:
|
||||
|
||||
- **Linear → GitHub**: Add the GitHub issue URL as a link attachment on the Linear ticket
|
||||
- **GitHub → Linear**: Add `https://linear.app/n8n/issue/<TICKET-ID>` in the GitHub issue body
|
||||
|
||||
If the user creates one and mentions the other exists, offer to add the cross-link.
|
||||
|
|
@ -1,193 +0,0 @@
|
|||
---
|
||||
name: n8n:create-pr
|
||||
description: Creates GitHub pull requests with properly formatted titles that pass the check-pr-title CI validation. Use when creating PRs, submitting changes for review, or when the user says /pr or asks to create a pull request.
|
||||
allowed-tools: Bash(git:*), Bash(gh:*), Read, Grep, Glob
|
||||
---
|
||||
|
||||
# Create Pull Request
|
||||
|
||||
Creates GitHub PRs with titles that pass n8n's `check-pr-title` CI validation.
|
||||
|
||||
## PR Title Format
|
||||
|
||||
```
|
||||
<type>(<scope>): <summary>
|
||||
```
|
||||
|
||||
### Types (required)
|
||||
|
||||
| Type | Description | Changelog |
|
||||
|------------|--------------------------------------------------|-----------|
|
||||
| `feat` | New feature | Yes |
|
||||
| `fix` | Bug fix | Yes |
|
||||
| `perf` | Performance improvement | Yes |
|
||||
| `test` | Adding/correcting tests | No |
|
||||
| `docs` | Documentation only | No |
|
||||
| `refactor` | Code change (no bug fix or feature) | No |
|
||||
| `build` | Build system or dependencies | No |
|
||||
| `ci` | CI configuration | No |
|
||||
| `chore` | Routine tasks, maintenance | No |
|
||||
|
||||
### Scopes (optional but recommended)
|
||||
|
||||
- `API` - Public API changes
|
||||
- `benchmark` - Benchmark CLI changes
|
||||
- `core` - Core/backend/private API
|
||||
- `editor` - Editor UI changes
|
||||
- `* Node` - Specific node (e.g., `Slack Node`, `GitHub Node`)
|
||||
|
||||
### Summary Rules
|
||||
|
||||
- Use imperative present tense: "Add" not "Added"
|
||||
- Capitalize first letter
|
||||
- No period at the end
|
||||
- No ticket IDs (e.g., N8N-1234)
|
||||
- Add `(no-changelog)` suffix to exclude from changelog
|
||||
|
||||
## Steps
|
||||
|
||||
1. **Check current state**:
|
||||
```bash
|
||||
git status
|
||||
git diff --stat
|
||||
git log origin/master..HEAD --oneline
|
||||
```
|
||||
|
||||
2. **Check for implementation plan**: Look for a plan file in `.claude/plans/`
|
||||
that matches the current branch's ticket ID (e.g. if branch is
|
||||
`scdekov/PAY-1234-some-feature`, check for `.claude/plans/PAY-1234.md`).
|
||||
If a plan file exists, ask the user whether they want to include it in the
|
||||
PR description as a collapsible `<details>` section (see Plan Section below).
|
||||
Only include the plan if the user explicitly approves.
|
||||
|
||||
3. **If this is a security fix**, audit every public-facing artifact before
|
||||
proceeding (see Security Fixes below).
|
||||
|
||||
4. **Analyze changes** to determine:
|
||||
- Type: What kind of change is this?
|
||||
- Scope: Which package/area is affected?
|
||||
- Summary: What does the change do?
|
||||
|
||||
5. **Push branch if needed**:
|
||||
```bash
|
||||
git push -u origin HEAD
|
||||
```
|
||||
|
||||
6. **Create PR** using gh CLI. Read `.github/pull_request_template.md` as the
|
||||
body structure, then populate each section with actual content before
|
||||
creating the PR:
|
||||
- **Summary**: describe what the PR does and how to test it
|
||||
- **Related tickets**: add the Linear ticket URL (`https://linear.app/n8n/issue/[TICKET-ID]`) and any GitHub issue links
|
||||
- **Checklist**: keep as-is from the template
|
||||
- Add a "🤖 PR Summary generated by AI" at the end of the body
|
||||
|
||||
```bash
|
||||
gh pr create --draft --title "<type>(<scope>): <summary>" --body "$(cat <<'EOF'
|
||||
<populated body based on pull_request_template.md>
|
||||
EOF
|
||||
)"
|
||||
```
|
||||
|
||||
## PR Body Guidelines
|
||||
|
||||
Based on `.github/pull_request_template.md`:
|
||||
|
||||
### Summary Section
|
||||
- Describe what the PR does
|
||||
- Explain how to test the changes
|
||||
- Include screenshots/videos for UI changes
|
||||
|
||||
### Related Links Section
|
||||
- Link to Linear ticket: `https://linear.app/n8n/issue/[TICKET-ID]`
|
||||
- Link to GitHub issues using keywords to auto-close:
|
||||
- `closes #123` / `fixes #123` / `resolves #123`
|
||||
- Link to Community forum posts if applicable
|
||||
|
||||
### Checklist
|
||||
All items should be addressed before merging:
|
||||
- The human author of the PR has checked the "I have seen this code, I have run this code, and I take responsibility for this code." checkbox
|
||||
- PR title follows conventions
|
||||
- Docs updated or follow-up ticket created
|
||||
- Tests included (bugs need regression tests, features need coverage)
|
||||
- `release/backport` label added if urgent fix needs backporting
|
||||
|
||||
## Examples
|
||||
|
||||
### Feature in editor
|
||||
```
|
||||
feat(editor): Add workflow performance metrics display
|
||||
```
|
||||
|
||||
### Bug fix in core
|
||||
```
|
||||
fix(core): Resolve memory leak in execution engine
|
||||
```
|
||||
|
||||
### Node-specific change
|
||||
```
|
||||
fix(Slack Node): Handle rate limiting in message send
|
||||
```
|
||||
|
||||
### Breaking change (add exclamation mark before colon)
|
||||
```
|
||||
feat(API)!: Remove deprecated v1 endpoints
|
||||
```
|
||||
|
||||
### No changelog entry
|
||||
```
|
||||
refactor(core): Simplify error handling (no-changelog)
|
||||
```
|
||||
|
||||
### No scope (affects multiple areas)
|
||||
```
|
||||
chore: Update dependencies to latest versions
|
||||
```
|
||||
|
||||
## Validation
|
||||
|
||||
The PR title must match this pattern:
|
||||
```
|
||||
^(feat|fix|perf|test|docs|refactor|build|ci|chore|revert)(\([a-zA-Z0-9 ]+( Node)?\))?!?: [A-Z].+[^.]$
|
||||
```
|
||||
|
||||
Key validation rules:
|
||||
- Type must be one of the allowed types
|
||||
- Scope is optional but must be in parentheses if present
|
||||
- Exclamation mark for breaking changes goes before the colon
|
||||
- Summary must start with capital letter
|
||||
- Summary must not end with a period
|
||||
|
||||
## Plan Section
|
||||
|
||||
If a matching plan file was found in `.claude/plans/` and the user has approved
|
||||
including it, add a collapsible section at the end of the PR body (after the
|
||||
checklist, before `EOF`):
|
||||
|
||||
```markdown
|
||||
<details>
|
||||
<summary>Implementation plan</summary>
|
||||
|
||||
<!-- paste plan file contents here -->
|
||||
|
||||
</details>
|
||||
```
|
||||
|
||||
## Security Fixes
|
||||
|
||||
**This repo is public.** Never expose the attack vector in any public artifact.
|
||||
Describe **what the code does**, not what threat it prevents.
|
||||
|
||||
| Artifact | BAD | GOOD |
|
||||
|---|---|---|
|
||||
| Branch | `fix-sql-injection-in-webhook` | `fix-webhook-input-validation` |
|
||||
| PR title | `fix(core): Prevent SSRF` | `fix(core): Validate outgoing URLs` |
|
||||
| Commit msg | `fix: prevent denial of service` | `fix: add payload size validation` |
|
||||
| PR body | *"attacker could trigger SSRF…"* | *"validates URL protocol and host"* |
|
||||
| Linear ref | URL with slug (leaks title) | URL without slug or ticket ID only |
|
||||
| Test name | `'should prevent SQL injection'` | `'should sanitize query parameters'` |
|
||||
|
||||
|
||||
**Before pushing a security fix, verify:** no branch name, commit, PR title,
|
||||
PR body, Linear URL, test name, or code comment hints at the vulnerability.
|
||||
|
||||
**When in doubt, check the Linear issue for possible extra precautions**
|
||||
|
|
@ -1,127 +0,0 @@
|
|||
---
|
||||
name: n8n:create-skill
|
||||
description: >-
|
||||
Guides users through creating effective Agent Skills. Use when you want to
|
||||
create, write, or author a new skill, or asks about skill structure, best
|
||||
practices, or SKILL.md format.
|
||||
---
|
||||
# Creating skills
|
||||
|
||||
Skills are markdown (plus optional scripts) that teach the agent a focused workflow. **Keep SKILL.md short**—the context window is shared with chat, code, and other skills.
|
||||
|
||||
## Where skills live
|
||||
|
||||
| Location | When to use |
|
||||
|----------|-------------|
|
||||
| **`.claude/plugins/n8n/skills/<name>/`** | Default for n8n: team-shared, versioned, namespaced under `n8n:`. |
|
||||
| `~/.claude/skills/<name>/` | Personal skill for Claude Code across all projects. |
|
||||
| `~/.cursor/skills/<name>/` | Optional personal skill for Cursor only, global to your machine. |
|
||||
|
||||
**Do not** put custom skills in `~/.cursor/skills-cursor/`—that is reserved for Cursor’s built-in skills.
|
||||
|
||||
Prefer **plugin `.claude/plugins/n8n/skills/`** for anything that should match how the rest of the team works.
|
||||
|
||||
## Before you write: gather requirements
|
||||
|
||||
Ask (or infer) briefly:
|
||||
|
||||
1. **Purpose** — one concrete task or workflow.
|
||||
2. **Triggers** — when should the agent apply this skill?
|
||||
3. **Gaps** — what does the agent *not* already know (project rules, URLs, formats)?
|
||||
4. **Outputs** — templates, checklists, or strict formats?
|
||||
5. **Examples** — follow an existing skill in `.claude/plugins/n8n/skills/` if one fits.
|
||||
|
||||
Ask the user in plain language when you need more detail.
|
||||
|
||||
## File layout
|
||||
|
||||
```
|
||||
skill-name/
|
||||
├── SKILL.md # required
|
||||
├── reference.md # optional — detail the agent reads only if needed
|
||||
├── examples.md # optional
|
||||
└── scripts/ # optional
|
||||
```
|
||||
|
||||
### Frontmatter (required)
|
||||
|
||||
```yaml
|
||||
---
|
||||
name: skill-name # lowercase, hyphens, max 64 chars
|
||||
description: >- # max 1024 chars, non-empty — see below
|
||||
...
|
||||
---
|
||||
```
|
||||
|
||||
**Description** (discovery is everything — third person, WHAT + WHEN, trigger words):
|
||||
|
||||
- Good: `Extracts tables from PDFs and fills forms. Use when the user works with PDFs, forms, or document extraction.`
|
||||
- Bad: `Helps with documents` or `I can help you with PDFs`
|
||||
|
||||
## Authoring rules
|
||||
|
||||
1. **Concise** — assume the model is capable; only add non-obvious domain or project facts.
|
||||
2. **Progressive disclosure** — essentials in `SKILL.md`; long reference in `reference.md`. Link **one level deep** from `SKILL.md`.
|
||||
3. **Prefer one default** — e.g. one library or one workflow; add an escape hatch only if needed.
|
||||
4. **Stable wording** — one term per concept; avoid dated “until month X” notes unless you tuck legacy bits behind a short “Deprecated” note.
|
||||
5. **Paths** — forward slashes only (`scripts/foo.py`).
|
||||
|
||||
**Rough size:** aim for **well under ~200 lines** in `SKILL.md`; if it grows, split detail out.
|
||||
|
||||
### Scope: one job per skill (and parent skills)
|
||||
|
||||
- **Single responsibility** — one primary workflow or decision tree per skill. If triggers and steps diverge a lot (e.g. “create issue” vs “create PR” vs “full ticket → PR flow”), split into **smaller dedicated skills**.
|
||||
- **Prefer small + compose** — two or three focused skills keep irrelevant detail out of context until needed. A **parent** (orchestrator) skill can say *when* to follow each child workflow and link to their `SKILL.md`; avoid pasting full child content into the parent.
|
||||
- **When one large skill is OK** — a single end-to-end flow that always runs together and shares one tight checklist.
|
||||
|
||||
### MCPs, CLI tools, and other skills
|
||||
|
||||
- **Prefer CLI and repo commands** when they solve the same problem — agents handle them well and they usually add less scaffolding noise to context than MCP tool discovery and schemas. Examples: `gh` for PRs/issues, `pnpm` scripts from `AGENTS.md`.
|
||||
- **MCPs are optional per user** — not everyone has the same servers enabled. If a skill **requires** a specific MCP to work as written, say so explicitly:
|
||||
- Put a hint in the **frontmatter description** (e.g. “Requires Linear MCP for …”) so mismatches are obvious early.
|
||||
- Add a short **Prerequisites** (or **Requirements**) block near the top: which integration, what it is used for, and a **fallback** (e.g. web UI, `gh`, or “ask the user to paste …”) when it is missing.
|
||||
- **Referencing other skills** — use the namespaced invocation name (e.g. `n8n:create-issue`) so the agent resolves the plugin skill. For human-readable links, give the path from the repo root (e.g. `.claude/plugins/n8n/skills/create-issue/SKILL.md`). From a sibling folder, a relative link works too: `[create-issue](../create-issue/SKILL.md)`. Parent skills should delegate steps instead of duplicating long procedures.
|
||||
|
||||
## Patterns (pick what fits)
|
||||
|
||||
- **Template** — give the exact output shape (markdown/code blocks).
|
||||
- **Checklist** — numbered or `- [ ]` steps for multi-step work.
|
||||
- **Branching** — “If A → …; if B → …” at the top of a workflow.
|
||||
- **Scripts** — document run commands; say whether to **execute** or **read** the script.
|
||||
|
||||
## Workflow: create → verify
|
||||
|
||||
1. **Name + description** — hyphenated name; description with triggers.
|
||||
2. **Outline** — minimal sections; link optional files.
|
||||
3. **Implement** — `SKILL.md` first; add `reference.md` / `scripts/` only if they save tokens or reduce errors.
|
||||
4. **Check** — third-person description; terminology consistent; no duplicate encyclopedic content the model already knows.
|
||||
|
||||
## Anti-patterns
|
||||
|
||||
- Verbose tutorials (“what is a PDF”) inside the skill.
|
||||
- Many equivalent options with no default.
|
||||
- Vague names (`helper`, `utils`).
|
||||
- Deep chains of linked files.
|
||||
- Assuming an MCP or tool is present without stating it or offering a fallback.
|
||||
- One oversized skill that mixes unrelated workflows instead of smaller skills + a thin parent.
|
||||
|
||||
## Quick example stub
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: my-workflow
|
||||
description: Does X using project convention Y. Use when the user asks for X or mentions Z.
|
||||
---
|
||||
|
||||
# My workflow
|
||||
|
||||
1. …
|
||||
2. …
|
||||
|
||||
## Output format
|
||||
|
||||
Use a fenced code block for the exact shape reviewers should see.
|
||||
|
||||
## More detail
|
||||
See [reference.md](reference.md) if edge cases matter.
|
||||
```
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
---
|
||||
name: n8n:design-system
|
||||
description: Guidelines on using Design System styles and components. Use when working on .vue files in packages/frontend. Triggers for tasks that include component architecture, styling, UI changes, or feature work.
|
||||
---
|
||||
|
||||
# Design System
|
||||
|
||||
Comprehensive guide for building, styling, and using components in the frontend.
|
||||
|
||||
## When to Apply
|
||||
Reference these guidelines when:
|
||||
- Working on `.{vue|css|scss}` files in `packages/frontend`
|
||||
- Adding new components to `packages/frontend/@n8n/design-system`
|
||||
- Refactoring styles for Vue components
|
||||
- Implementing new UI components or features
|
||||
- Reviewing changes to UI
|
||||
|
||||
## Rules
|
||||
- Follow guidelines in `packages/frontend/@n8n/design-system/src/styleguide/*.mdx`
|
||||
- ALWAYS use CSS variables for styles from `packages/frontend/@n8n/design-system/src/css/_tokens.scss` or `packages/frontend/@n8n/design-system/src/css/_primitives.scss`. Use hard-coded values only when no suitable tokens.
|
||||
- ALWAYS prefer using existing components from `packages/frontend/@n8n/design-system/src/components`. Prefer components that aren't marked `@deprecated`.
|
||||
- Use `light-dark()` when alternating colors for light/dark mode
|
||||
- When working with animations or transitions, ALWAYS prefer using mixins from `packages/frontend/@n8n/design-system/src/css/mixins/motion.scss`
|
||||
- When reviewing animations, follow the guides in `rules/web-animation-guidelines.md`
|
||||
- When reviewing UI changes or adding new components, follow `rules/web-interface-guidelines.md`
|
||||
|
||||
## Examples
|
||||
- "Add a modal dialog for confirming workflow deletion" → Use `N8nDialog`
|
||||
- "Add a dropdown to select workflow status" → Use `N8nDropdown` or `N8nSelect`
|
||||
- "Add button with + icon to add new item" → Use `N8nButton` with the `iconOnly` prop, wrap it in `N8nTooltip`, and use `N8nIcon` with a proper aria-label.
|
||||
- "Add a destructive action button" → use `N8nButton` with `variant="destructive"`
|
||||
- "Make background color white/black" → Use `var(--background--surface)` for white on light mode and "black" on dark mode
|
||||
- "Animate the title in gracefully" -> Use `fade-in-up` mixin from `motion.scss` with `var(--duration--base)`
|
||||
|
|
@ -1,93 +0,0 @@
|
|||
# Web Motion Guidelines
|
||||
Design and implement web animations that feel natural and purposeful
|
||||
|
||||
## Timing and Duration
|
||||
|
||||
## Duration Guidelines
|
||||
|
||||
| Element Type | Duration |
|
||||
| --------------------------------- | --------- |
|
||||
| Micro-interactions | 100-150ms |
|
||||
| Standard UI (tooltips, dropdowns) | 150-250ms |
|
||||
| Modals, drawers | 200-300ms |
|
||||
|
||||
**Rules:**
|
||||
|
||||
- UI animations should stay under 300ms
|
||||
- Larger elements animate slower than smaller ones
|
||||
- Exit animations can be ~20% faster than entrance
|
||||
- Match duration to distance - longer travel = longer duration
|
||||
|
||||
### The Frequency
|
||||
|
||||
Determine how often users will see the animation:
|
||||
|
||||
- **100+ times/day** → No animation (or drastically reduced)
|
||||
- **Occasional use** → Standard animation
|
||||
- **Rare/first-time** → Can be more special
|
||||
|
||||
**Example:** Raycast never animates because users open it hundreds of times a day.
|
||||
|
||||
## When to Animate
|
||||
|
||||
**Do animate:**
|
||||
|
||||
- Enter/exit transitions for spatial consistency
|
||||
- State changes that benefit from visual continuity
|
||||
- Responses to user actions (feedback)
|
||||
- Rarely-used interactions where delight adds value
|
||||
|
||||
**Don't animate:**
|
||||
|
||||
- Keyboard-initiated actions
|
||||
- Hover effects on frequently-used elements
|
||||
- Anything users interact with 100+ times daily
|
||||
- When speed matters more than smoothness
|
||||
|
||||
## Performance
|
||||
|
||||
Prefer animating `transform` and `opacity`. These skip layout and paint stages, running entirely on the GPU.
|
||||
|
||||
**Avoid animating:**
|
||||
|
||||
- `padding`, `margin`, `height`, `width` (trigger layout)
|
||||
- `blur` filters above 20px (expensive, especially Safari)
|
||||
- CSS variables in deep component trees
|
||||
|
||||
### Optimization Techniques
|
||||
|
||||
```css
|
||||
/* Force GPU acceleration */
|
||||
.animated-element {
|
||||
will-change: transform;
|
||||
}
|
||||
```
|
||||
|
||||
## Practical Tips
|
||||
|
||||
Quick reference for common scenarios. See [PRACTICAL-TIPS.md](PRACTICAL-TIPS.md) for detailed implementations.
|
||||
|
||||
| Scenario | Solution |
|
||||
| ------------------------------- | ----------------------------------------------- |
|
||||
| Make buttons feel responsive | Add `transform: scale(0.97)` on `:active` |
|
||||
| Element appears from nowhere | Start from `scale(0.95)`, not `scale(0)` |
|
||||
| Shaky/jittery animations | Add `will-change: transform` |
|
||||
| Hover causes flicker | Animate child element, not parent |
|
||||
| Popover scales from wrong point | Set `transform-origin` to trigger location |
|
||||
| Sequential tooltips feel slow | Skip delay/animation after first tooltip |
|
||||
| Small buttons hard to tap | Use 44px minimum hit area (pseudo-element) |
|
||||
| Something still feels off | Add subtle blur (under 20px) to mask it |
|
||||
| Hover triggers on mobile | Use `@media (hover: hover) and (pointer: fine)` |
|
||||
|
||||
## Easing Decision Flowchart
|
||||
|
||||
Is the element entering or exiting the viewport?
|
||||
├── Yes → ease-out
|
||||
└── No
|
||||
├── Is it moving/morphing on screen?
|
||||
│ └── Yes → ease-in-out
|
||||
└── Is it a hover change?
|
||||
├── Yes → ease
|
||||
└── Is it constant motion?
|
||||
├── Yes → linear
|
||||
└── Default → ease-out
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
# Web Interface Guidelines
|
||||
<!-- credit to https://github.com/raunofreiberg/interfaces -->
|
||||
This document outlines a non-exhaustive list of details that make a good (web) interface. It is a living document, periodically updated based on learnings. Some of these may be subjective, but most apply to all websites.
|
||||
|
||||
The [WAI-ARIA](https://www.w3.org/TR/wai-aria-1.1/) spec is deliberately not duplicated in this document. However, some accessibility guidelines may be pointed out. Contributions are welcome. Edit [this file](https://github.com/raunofreiberg/interfaces/blob/main/README.md) and submit a pull request.
|
||||
|
||||
## Interactivity
|
||||
|
||||
- Clicking the input label should focus the input field
|
||||
- Inputs should be wrapped with a `<form>` to submit by pressing Enter
|
||||
- Inputs should have an appropriate `type` like `password`, `email`, etc
|
||||
- Inputs should disable `spellcheck` and `autocomplete` attributes most of the time
|
||||
- Inputs should leverage HTML form validation by using the `required` attribute when appropriate
|
||||
- Input prefix and suffix decorations, such as icons, should be absolutely positioned on top of the text input with padding, not next to it, and trigger focus on the input
|
||||
- Toggles should immediately take effect, not require confirmation
|
||||
- Buttons should be disabled after submission to avoid duplicate network requests
|
||||
- Interactive elements should disable `user-select` for inner content
|
||||
- Decorative elements (glows, gradients) should disable `pointer-events` to not hijack events
|
||||
- Interactive elements in a vertical or horizontal list should have no dead areas between each element, instead, increase their `padding`
|
||||
|
||||
## Typography
|
||||
|
||||
- Fonts should have `-webkit-font-smoothing: antialiased` applied for better legibility
|
||||
- Fonts should have `text-rendering: optimizeLegibility` applied for better legibility
|
||||
- Fonts should be subset based on the content, alphabet or relevant language(s)
|
||||
- Font weight should not change on hover or selected state to prevent layout shift
|
||||
- Font weights below 400 should not be used
|
||||
- Medium sized headings generally look best with a font weight between 500-600
|
||||
- Adjust values fluidly by using CSS [`clamp()`](https://developer.mozilla.org/en-US/docs/Web/CSS/clamp), e.g. `clamp(48px, 5vw, 72px)` for the `font-size` of a heading
|
||||
- Where available, tabular figures should be applied with `font-variant-numeric: tabular-nums`, particularly in tables or when layout shifts are undesirable, like in timers
|
||||
- Prevent text resizing unexpectedly in landscape mode on iOS with `-webkit-text-size-adjust: 100%`
|
||||
|
||||
|
||||
## Motion
|
||||
|
||||
- Switching themes should not trigger transitions and animations on elements [^1]
|
||||
- Animation duration should not be more than 200ms for interactions to feel immediate
|
||||
- Animation values should be proportional to the trigger size:
|
||||
- Don't animate dialog scale in from 0 → 1, fade opacity and scale from ~0.8
|
||||
- Don't scale buttons on press from 1 → 0.8, but ~0.96, ~0.9, or so
|
||||
- Actions that are frequent and low in novelty should avoid extraneous animations: [^2]
|
||||
- Opening a right click menu
|
||||
- Deleting or adding items from a list
|
||||
- Hovering trivial buttons
|
||||
- Looping animations should pause when not visible on the screen to offload CPU and GPU usage
|
||||
- Use `scroll-behavior: smooth` for navigating to in-page anchors, with an appropriate offset
|
||||
|
||||
## Touch
|
||||
|
||||
- Hover states should not be visible on touch press, use `@media (hover: hover)` [^3]
|
||||
- Font size for inputs should not be smaller than 16px to prevent iOS zooming on focus
|
||||
- Inputs should not auto focus on touch devices as it will open the keyboard and cover the screen
|
||||
- Apply `muted` and `playsinline` to `<video />` tags to auto play on iOS
|
||||
- Disable `touch-action` for custom components that implement pan and zoom gestures to prevent interference from native behavior like zooming and scrolling
|
||||
- Disable the default iOS tap highlight with `-webkit-tap-highlight-color: rgba(0,0,0,0)`, but always replace it with an appropriate alternative
|
||||
|
||||
## Optimizations
|
||||
|
||||
- Large `blur()` values for `filter` and `backdrop-filter` may be slow
|
||||
- Scaling and blurring filled rectangles will cause banding, use radial gradients instead
|
||||
- Sparingly enable GPU rendering with `transform: translateZ(0)` for unperformant animations
|
||||
- Toggle `will-change` on unperformant scroll animations for the duration of the animation [^4]
|
||||
- Auto-playing too many videos on iOS will choke the device, pause or even unmount off-screen videos
|
||||
- Bypass React's render lifecycle with refs for real-time values that can commit to the DOM directly [^5]
|
||||
- [Detect and adapt](https://github.com/GoogleChromeLabs/react-adaptive-hooks) to the hardware and network capabilities of the user's device
|
||||
|
||||
## Accessibility
|
||||
|
||||
- Disabled buttons should not have tooltips, they are not accessible [^6]
|
||||
- Focusable elements in a sequential list should be navigable with <kbd>↑</kbd> <kbd>↓</kbd>
|
||||
- Focusable elements in a sequential list should be deletable with <kbd>⌘</kbd> <kbd>Backspace</kbd>
|
||||
- To open immediately on press, dropdown menus should trigger on `mousedown`, not `click`
|
||||
- Use an SVG favicon with a style tag that adheres to the system theme based on `prefers-color-scheme`
|
||||
- Icon only interactive elements should define an explicit `aria-label`
|
||||
- Tooltips triggered by hover should not contain interactive content
|
||||
- Images should always be rendered with `<img>` for screen readers and ease of copying from the right click menu
|
||||
- Illustrations built with HTML should have an explicit `aria-label` instead of announcing the raw DOM tree to people using screen readers
|
||||
- Gradient text should unset the gradient on `::selection` state
|
||||
- When using nested menus, use a "prediction cone" to prevent the pointer from accidentally closing the menu when moving across other elements.
|
||||
|
||||
|
||||
## Design
|
||||
|
||||
- Optimistically update data locally and roll back on server error with feedback
|
||||
- Authentication redirects should happen on the server before the client loads to avoid janky URL changes
|
||||
- Style the document selection state with `::selection`
|
||||
- Display feedback relative to its trigger:
|
||||
- Show a temporary inline checkmark on a successful copy, not a notification
|
||||
- Highlight the relevant input(s) on form error(s)
|
||||
- Empty states should prompt to create a new item, with optional templates
|
||||
|
||||
[^1]: Switching between dark mode or light mode will trigger transitions on elements that are meant for explicit interactions like hover. We can [disable transitions temporarily](https://paco.me/writing/disable-theme-transitions) to prevent this. For Next.js, use [next-themes](https://github.com/pacocoursey/next-themes) which prevents transitions out of the box.
|
||||
[^2]: This is a matter of taste but some interactions just feel better with no motion. For example, the native macOS right click menu only animates out, not in, due to the frequent usage of it.
|
||||
[^3]: Most touch devices on press will temporarily flash the hover state, unless explicitly only defined for pointer devices with [`@media (hover: hover)`](https://developer.mozilla.org/en-US/docs/Web/CSS/@media/hover).
|
||||
[^4]: Use [`will-change`](https://developer.mozilla.org/en-US/docs/Web/CSS/will-change) as a last resort to improve performance. Pre-emptively throwing it on elements for better performance may have the opposite effect.
|
||||
[^5]: This might be controversial but sometimes it can be beneficial to manipulate the DOM directly. For example, instead of relying on React re-rendering on every wheel event, we can track the delta in a ref and update relevant elements directly in the callback.
|
||||
[^6]: Disabled buttons do not appear in tab order in the DOM so the tooltip will never be announced for keyboard users and they won't know why the button is disabled.
|
||||
[^7]: As of 2023, Safari will not take the border radius of an element into account when defining custom outline styles. [Safari 16.4](https://developer.apple.com/documentation/safari-release-notes/safari-16_4-release-notes) has added support for `outline` following the curve of border radius. However, keep in mind that not everyone updates their OS immediately.
|
||||
|
|
@ -1,198 +0,0 @@
|
|||
---
|
||||
name: n8n:linear-issue
|
||||
description: Fetch and analyze Linear issue with all related context. Use when starting work on a Linear ticket, analyzing issues, or gathering context about a Linear issue.
|
||||
argument-hint: "[issue-id]"
|
||||
compatibility:
|
||||
requires:
|
||||
- mcp: linear
|
||||
description: Core dependency — used to fetch issue details, relations, and comments
|
||||
- cli: gh
|
||||
description: GitHub CLI — used to fetch linked PRs and issues. Must be authenticated (gh auth login)
|
||||
optional:
|
||||
- mcp: notion
|
||||
description: Used to fetch linked Notion documents. Skip Notion steps if unavailable.
|
||||
- skill: loom-transcript
|
||||
description: Used to fetch Loom video transcripts. Skip Loom steps if unavailable.
|
||||
- cli: curl
|
||||
description: Used to download images/attachments. Typically pre-installed.
|
||||
---
|
||||
|
||||
# Linear Issue Analysis
|
||||
|
||||
Start work on Linear issue **$ARGUMENTS**
|
||||
|
||||
## Prerequisites
|
||||
|
||||
This skill depends on external tools. Before proceeding, verify availability:
|
||||
|
||||
**Required:**
|
||||
- **Linear MCP** (`mcp__linear`): Must be connected. Without it the skill cannot function at all.
|
||||
- **GitHub CLI** (`gh`): Must be installed and authenticated. Run `gh auth status` to verify. Used to fetch linked PRs and issues.
|
||||
|
||||
**Optional (graceful degradation):**
|
||||
- **Notion MCP** (`mcp__notion`): Needed only if the issue links to Notion docs. If unavailable, note the Notion links in the summary and tell the user to check them manually.
|
||||
- **Loom transcript skill** (`/loom-transcript`): Needed only if the issue contains Loom videos. If unavailable, note the Loom links in the summary for the user to watch.
|
||||
- **curl**: Used to download images. Almost always available; if missing, skip image downloads and note it.
|
||||
|
||||
If a required tool is missing, stop and tell the user what needs to be set up before continuing.
|
||||
|
||||
## Instructions
|
||||
|
||||
Follow these steps to gather comprehensive context about the issue:
|
||||
|
||||
### 1. Fetch the Issue and Comments from Linear
|
||||
|
||||
Use the Linear MCP tools to fetch the issue details and comments together:
|
||||
|
||||
- Use `mcp__linear__get_issue` with the issue ID to get full details including attachments
|
||||
- Include relations to see blocking/related/duplicate issues
|
||||
- **Immediately after**, use `mcp__linear__list_comments` with the issue ID to fetch all comments
|
||||
|
||||
Both calls should be made together in the same step to gather the complete context upfront.
|
||||
|
||||
### 2. Check for Private/Security Issues (MANDATORY — do this before anything else)
|
||||
|
||||
After fetching the issue, immediately check its labels:
|
||||
|
||||
1. Look at the labels returned with the issue.
|
||||
2. If any label is **`n8n-private`**:
|
||||
a. Run `git remote -v` (via Bash) to list all configured remotes.
|
||||
b. If **any** remote URL contains `n8n-io/n8n` without the `-private` suffix (i.e. matches the public repo), **stop immediately** and tell the user:
|
||||
|
||||
> **This issue is marked `n8n-private` and must be developed in a clean clone of the private repository.**
|
||||
>
|
||||
> One or more of your remotes point to the **public** `n8n-io/n8n` repo. Mixed remotes are not allowed — you must work in a **separate local clone** of `n8n-io/n8n-private` with no references to the public repo.
|
||||
> For the full process, see: https://www.notion.so/n8n/Processing-critical-high-security-bugs-vulnerabilities-in-private-2f45b6e0c94f803da806f472111fb1a5
|
||||
|
||||
Do **not** continue with any further steps — return after showing this message.
|
||||
|
||||
3. If the label is not present, or all remotes point exclusively to `n8n-io/n8n-private`, continue normally.
|
||||
|
||||
### 3. Analyze Attachments and Media (MANDATORY)
|
||||
|
||||
**IMPORTANT:** This step is NOT optional. You MUST scan and fetch all visual content from BOTH the issue description AND all comments.
|
||||
|
||||
**Screenshots/Images (ALWAYS fetch):**
|
||||
|
||||
1. Scan the issue description AND all comments for ALL image URLs:
|
||||
- `<img>` tags
|
||||
   - Markdown images: `![alt](url)`
|
||||
- Raw URLs (github.com/user-attachments, imgur.com, etc.)
|
||||
2. For EACH image found (in description or comments):
|
||||
- Download using `curl -sL "url" -o /path/to/image.png` (GitHub URLs require following redirects) OR the linear mcp
|
||||
- Use the `Read` tool on the downloaded file to view it
|
||||
- Describe what you see in detail
|
||||
3. Do NOT skip images - they often contain critical context like error messages, UI states, or configuration
|
||||
|
||||
**Loom Videos (ALWAYS fetch transcript):**
|
||||
|
||||
1. Scan the issue description AND all comments for Loom URLs (loom.com/share/...)
|
||||
2. For EACH Loom video found (in description or comments):
|
||||
- Use the `/loom-transcript` skill to fetch the FULL transcript
|
||||
- Summarize key points, timestamps, and any demonstrated issues
|
||||
3. Loom videos often contain crucial reproduction steps and context that text alone cannot convey
|
||||
|
||||
### 4. Fetch Related Context
|
||||
|
||||
**Related Linear Issues:**
|
||||
- Use `mcp__linear__get_issue` for any issues mentioned in relations (blocking, blocked by, related, duplicates)
|
||||
- Summarize how they relate to the main issue
|
||||
|
||||
**GitHub PRs and Issues:**
|
||||
- If GitHub links are mentioned, use `gh` CLI to fetch PR/issue details:
|
||||
- `gh pr view <number>` for pull requests
|
||||
- `gh issue view <number>` for issues
|
||||
- Download images attached to issues: `curl -H "Authorization: token $(gh auth token)" -L <image-url> -o image.png`
|
||||
|
||||
**Notion Documents:**
|
||||
- If Notion links are present, use `mcp__notion__notion-fetch` with the Notion URL or page ID to retrieve document content
|
||||
- Summarize relevant documentation
|
||||
|
||||
### 5. Review Comments
|
||||
|
||||
Comments were already fetched in Step 1. Review them for:
|
||||
- Additional context and discussion history
|
||||
- Any attachments or media linked in comments (process in Step 3)
|
||||
- Clarifications or updates to the original issue description
|
||||
|
||||
### 6. Identify Affected Node (if applicable)
|
||||
|
||||
Determine whether this issue is specific to a particular n8n node (e.g. a trigger, action, or tool node). Look for clues in:
|
||||
- The issue title (e.g. "Linear trigger", "Slack node", "HTTP Request")
|
||||
- The issue description and comments mentioning node names
|
||||
- Labels or tags on the issue (e.g. `node:linear`, `node:slack`)
|
||||
- Screenshots showing a specific node's configuration or error
|
||||
|
||||
If the issue is node-specific:
|
||||
|
||||
1. **Find the node type ID.** Use `Grep` to search for the node's display name (or keywords from it) in `packages/frontend/editor-ui/data/node-popularity.json` to find the exact node type ID. For reference, common ID patterns are:
|
||||
- Core nodes: `n8n-nodes-base.<camelCaseName>` (e.g. "HTTP Request" → `n8n-nodes-base.httpRequest`)
|
||||
- Trigger variants: `n8n-nodes-base.<name>Trigger` (e.g. "Gmail Trigger" → `n8n-nodes-base.gmailTrigger`)
|
||||
- Tool variants: `n8n-nodes-base.<name>Tool` (e.g. "Google Sheets Tool" → `n8n-nodes-base.googleSheetsTool`)
|
||||
- LangChain/AI nodes: `@n8n/n8n-nodes-langchain.<camelCaseName>` (e.g. "OpenAI Chat Model" → `@n8n/n8n-nodes-langchain.lmChatOpenAi`)
|
||||
|
||||
2. **Look up the node's popularity score** — first check for a Flaky assessment (see below), otherwise use the popularity file:
|
||||
|
||||
**Primary: Check for Flaky's assessment in Linear comments.** Flaky is an auto-triage agent that posts issue analysis as a comment. Search the comments already fetched in Step 1 for a comment from a user named "Flaky" (or containing "Flaky" in the author name) — do not re-fetch comments. If found, extract the popularity score and level directly from Flaky's analysis and use those values.
|
||||
|
||||
**Fallback (if no Flaky comment exists):** Look up the node's popularity score from `packages/frontend/editor-ui/data/node-popularity.json`. Use `Grep` to search for the node ID in that file. The popularity score is a log-scale value between 0 and 1. Use these thresholds to classify:
|
||||
|
||||
| Score | Level | Description | Examples |
|
||||
|-------|-------|-------------|----------|
|
||||
| ≥ 0.8 | **High** | Core/widely-used nodes, top ~5% | HTTP Request (0.98), Google Sheets (0.95), Postgres (0.83), Gmail Trigger (0.80) |
|
||||
| 0.4 – <0.8 | **Medium** | Regularly used integrations | Slack (0.78), GitHub (0.64), Jira (0.65), MongoDB (0.63) |
|
||||
| < 0.4 | **Low** | Niche or rarely used nodes | Amqp (0.34), Wise (0.36), CraftMyPdf (0.33) |
|
||||
|
||||
Include the raw score and the level (high/medium/low) in the summary, and note whether it came from Flaky or the popularity file.
|
||||
|
||||
3. If the node is **not found** in the popularity file (and no Flaky comment exists), note that it may be a community node or a very new/niche node.
|
||||
|
||||
### 7. Assess Effort/Complexity
|
||||
|
||||
**Primary: Check for Flaky's effort estimate in Linear comments.** Search the comments already fetched in Step 1 for a Flaky comment — do not re-fetch. If found, extract the effort/complexity estimate directly from it and use that as your assessment.
|
||||
|
||||
**Fallback (if no Flaky comment exists):** After gathering all context, assess the effort required to fix/implement the issue. Use the following T-shirt sizes:
|
||||
|
||||
| Size | Approximate effort |
|
||||
|------|--------------------|
|
||||
| XS | ≤ 1 hour |
|
||||
| S | ≤ 1 day |
|
||||
| M | 2-3 days |
|
||||
| L | 3-5 days |
|
||||
| XL | ≥ 6 days |
|
||||
|
||||
To make this assessment, consider:
|
||||
- **Scope of changes**: How many files/packages need to be modified? Is it a single node fix or a cross-cutting change?
|
||||
- **Complexity**: Is it a straightforward parameter change, a new API integration, a new credential type, or an architectural change?
|
||||
- **Testing**: How much test coverage is needed? Are E2E tests required?
|
||||
- **Risk**: Could this break existing functionality? Does it need backward compatibility?
|
||||
- **Dependencies**: Are there external API changes, new packages, or cross-team coordination needed?
|
||||
- **Documentation**: Does this require docs updates, migration guides, or changelog entries?
|
||||
|
||||
Provide the T-shirt size along with a brief justification explaining the key factors that drove the estimate. Note whether it came from Flaky or your own assessment.
|
||||
|
||||
### 8. Present Summary
|
||||
|
||||
**Before presenting, verify you have completed:**
|
||||
- [ ] Downloaded and viewed ALL images in the description AND comments
|
||||
- [ ] Fetched transcripts for ALL Loom videos in the description AND comments
|
||||
- [ ] Fetched ALL linked GitHub issues/PRs via `gh` CLI
|
||||
- [ ] Listed all comments on the issue
|
||||
- [ ] Checked whether the issue is node-specific and looked up popularity if so
|
||||
- [ ] Assessed effort/complexity with T-shirt size
|
||||
|
||||
After gathering all context, present a comprehensive summary including:
|
||||
|
||||
1. **Issue Overview**: Title, status, priority, assignee, labels
|
||||
2. **Description**: Full issue description with any clarifications from comments
|
||||
3. **Visual Context**: Summary of screenshots/videos (what you observed in each)
|
||||
4. **Affected Node** (if applicable): Node name, node type ID (`n8n-nodes-base.xxx`), popularity score with level (e.g. `0.64 — medium popularity`)
|
||||
5. **Related Issues**: How this connects to other work
|
||||
6. **Technical Context**: Any PRs, code references, or documentation
|
||||
7. **Effort Estimate**: T-shirt size (XS/S/M/L/XL) with justification
|
||||
8. **Next Steps**: Suggested approach based on all gathered context
|
||||
|
||||
## Notes
|
||||
|
||||
- The issue ID can be provided in formats like: `AI-1975`, `node-1975`, or just `1975` (will search)
|
||||
- If no issue ID is provided, ask the user for one
|
||||
|
|
@ -1,105 +0,0 @@
|
|||
---
|
||||
name: n8n:loom-transcript
|
||||
description: Fetch and display the full transcript from a Loom video URL. Use when the user wants to get or read a Loom transcript.
|
||||
argument-hint: "[loom-url]"
|
||||
---
|
||||
|
||||
# Loom Transcript Fetcher
|
||||
|
||||
Fetch the transcript from a Loom video using Loom's GraphQL API.
|
||||
|
||||
## Instructions
|
||||
|
||||
Given the Loom URL: $ARGUMENTS
|
||||
|
||||
### 1. Extract the Video ID
|
||||
|
||||
Parse the Loom URL to extract the 32-character hex video ID. Supported URL formats:
|
||||
- `https://www.loom.com/share/<video-id>`
|
||||
- `https://www.loom.com/embed/<video-id>`
|
||||
- `https://www.loom.com/share/<video-id>?sid=<session-id>`
|
||||
|
||||
The video ID is the 32-character hex string after `/share/` or `/embed/`.
|
||||
|
||||
### 2. Fetch Video Metadata
|
||||
|
||||
Use the `WebFetch` tool to POST to `https://www.loom.com/graphql` to get the video title and details.
|
||||
|
||||
Use this curl command via Bash:
|
||||
|
||||
```bash
|
||||
curl -s 'https://www.loom.com/graphql' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'x-loom-request-source: loom_web_45a5bd4' \
|
||||
-H 'apollographql-client-name: web' \
|
||||
-H 'apollographql-client-version: 45a5bd4' \
|
||||
-d '{
|
||||
"operationName": "GetVideoSSR",
|
||||
"variables": {"id": "<VIDEO_ID>", "password": null},
|
||||
"query": "query GetVideoSSR($id: ID!, $password: String) { getVideo(id: $id, password: $password) { ... on RegularUserVideo { id name description createdAt owner { display_name } } } }"
|
||||
}'
|
||||
```
|
||||
|
||||
### 3. Fetch the Transcript URLs
|
||||
|
||||
Use curl via Bash to call the GraphQL API:
|
||||
|
||||
```bash
|
||||
curl -s 'https://www.loom.com/graphql' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'x-loom-request-source: loom_web_45a5bd4' \
|
||||
-H 'apollographql-client-name: web' \
|
||||
-H 'apollographql-client-version: 45a5bd4' \
|
||||
-d '{
|
||||
"operationName": "FetchVideoTranscript",
|
||||
"variables": {"videoId": "<VIDEO_ID>", "password": null},
|
||||
"query": "query FetchVideoTranscript($videoId: ID!, $password: String) { fetchVideoTranscript(videoId: $videoId, password: $password) { ... on VideoTranscriptDetails { id video_id source_url captions_source_url } ... on GenericError { message } } }"
|
||||
}'
|
||||
```
|
||||
|
||||
Replace `<VIDEO_ID>` with the actual video ID extracted in step 1.
|
||||
|
||||
The response contains:
|
||||
- `source_url` — JSON transcript URL
|
||||
- `captions_source_url` — VTT (WebVTT) captions URL
|
||||
|
||||
### 4. Download and Parse the Transcript
|
||||
|
||||
Fetch **both** URLs returned from step 3 (if available):
|
||||
|
||||
1. **VTT captions** (`captions_source_url`): Download with `curl -sL "<url>"`. This is a WebVTT file with timestamps and text.
|
||||
2. **JSON transcript** (`source_url`): Download with `curl -sL "<url>"`. This is a JSON file with transcript segments.
|
||||
|
||||
Prefer the VTT captions as the primary source since they include proper timestamps. Fall back to the JSON transcript if VTT is unavailable.
|
||||
|
||||
### 5. Present the Transcript
|
||||
|
||||
Format and present the full transcript to the user:
|
||||
|
||||
**Video:** [Title from metadata]
|
||||
**Author:** [Owner name]
|
||||
**Date:** [Created date]
|
||||
|
||||
---
|
||||
|
||||
**0:00** - First transcript segment text...
|
||||
|
||||
**0:14** - Second transcript segment text...
|
||||
|
||||
(continue for all segments)
|
||||
|
||||
---
|
||||
|
||||
## Error Handling
|
||||
|
||||
- If the GraphQL response contains a `GenericError`, report the error message to the user.
|
||||
- If both `source_url` and `captions_source_url` are null/missing, tell the user that no transcript is available for this video.
|
||||
- If the video URL is invalid or the ID cannot be extracted, ask the user for a valid Loom URL.
|
||||
|
||||
## Notes
|
||||
|
||||
- No authentication or cookies are required — Loom's transcript API is publicly accessible.
|
||||
- Only English transcripts are available through this API.
|
||||
- Transcripts are auto-generated and may contain minor errors.
|
||||
|
|
@ -1,322 +0,0 @@
|
|||
---
|
||||
name: n8n:node-add-oauth
|
||||
description: Add OAuth2 credential support to an existing n8n node — creates the credential file, updates the node, adds tests, and keeps the CLI constant in sync. Use when the user says /node-add-oauth.
|
||||
argument-hint: "[node-name] [optional: custom-scopes flag or scope list]"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
Add OAuth2 (Authorization Code / 3LO) support to an existing n8n node. Works for any
|
||||
third-party service that supports standard OAuth2.
|
||||
|
||||
Before starting, read comparable existing OAuth2 credential files and tests under
|
||||
`packages/nodes-base/credentials/` to understand the conventions used in this codebase
|
||||
(e.g. `DiscordOAuth2Api.credentials.ts`, `MicrosoftTeamsOAuth2Api.credentials.ts`).
|
||||
|
||||
---
|
||||
|
||||
## Step 0 — Parse arguments
|
||||
|
||||
Extract:
|
||||
- `NODE_NAME`: the service name (e.g. `GitHub`, `Notion`). Try to infer from the argument;
|
||||
if ambiguous, ask the user.
|
||||
- `CUSTOM_SCOPES`: whether the credential should support user-defined scopes. If the
|
||||
argument does not make this clear, **ask the user** before proceeding:
|
||||
> "Should users be able to customise the OAuth2 scopes for this credential, or should
|
||||
> scopes be fixed?"
|
||||
|
||||
---
|
||||
|
||||
## Step 1 — Explore the node
|
||||
|
||||
Read the following (adjust path conventions for the specific service):
|
||||
|
||||
1. Node directory: `packages/nodes-base/nodes/{NODE_NAME}/`
|
||||
- Find `*.node.ts` (main node) and any `*Trigger.node.ts`
|
||||
- Find `GenericFunctions.ts` (may be named differently)
|
||||
- Check if an `auth` / `version` subdirectory exists
|
||||
2. Existing credentials: `packages/nodes-base/credentials/` — look for existing
|
||||
`{NODE_NAME}*Api.credentials.ts` files to understand the naming convention and any
|
||||
auth method already in use.
|
||||
3. `package.json` at `packages/nodes-base/package.json` — find where existing credentials
|
||||
for this node are registered (grep for the node name).
|
||||
|
||||
---
|
||||
|
||||
## Step 2 — Research OAuth2 endpoints
|
||||
|
||||
Look up the service's OAuth2 documentation:
|
||||
- Authorization URL
|
||||
- Access Token URL
|
||||
- Required auth query parameters (e.g. `prompt=consent`, `access_type=offline`)
|
||||
- Default scopes needed for the node's existing operations
|
||||
- Whether the API requires a cloudId / workspace ID lookup after the token exchange
|
||||
(Atlassian-style gateway APIs do; most services don't)
|
||||
|
||||
If you can't determine the endpoints confidently, ask the user to provide them.
|
||||
|
||||
---
|
||||
|
||||
## Step 3 — Create the credential file
|
||||
|
||||
File: `packages/nodes-base/credentials/{NODE_NAME}OAuth2Api.credentials.ts`
|
||||
|
||||
```typescript
|
||||
import type { ICredentialType, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
const defaultScopes = [/* minimum scopes for existing node operations */];
|
||||
|
||||
export class {NODE_NAME}OAuth2Api implements ICredentialType {
|
||||
name = '{camelCase}OAuth2Api';
|
||||
extends = ['oAuth2Api'];
|
||||
displayName = '{Display Name} OAuth2 API';
|
||||
documentationUrl = '{doc-slug}'; // matches docs.n8n.io/integrations/...
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
// Include service-specific fields the node needs to construct API calls
|
||||
// (e.g. domain, workspace URL) — add BEFORE the hidden fields below.
|
||||
|
||||
{ displayName: 'Grant Type', name: 'grantType', type: 'hidden', default: 'authorizationCode' },
|
||||
{ displayName: 'Authorization URL', name: 'authUrl', type: 'hidden', default: '{AUTH_URL}', required: true },
|
||||
{ displayName: 'Access Token URL', name: 'accessTokenUrl', type: 'hidden', default: '{TOKEN_URL}', required: true },
|
||||
// Only include authQueryParameters if the service requires extra query params:
|
||||
{ displayName: 'Auth URI Query Parameters', name: 'authQueryParameters', type: 'hidden', default: '{QUERY_PARAMS}' },
|
||||
{ displayName: 'Authentication', name: 'authentication', type: 'hidden', default: 'header' },
|
||||
|
||||
// ── Custom scopes block (ONLY when CUSTOM_SCOPES = yes) ──────────────
|
||||
{
|
||||
displayName: 'Custom Scopes',
|
||||
name: 'customScopes',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Define custom scopes',
|
||||
},
|
||||
{
|
||||
displayName:
|
||||
'The default scopes needed for the node to work are already set. If you change these the node may not function correctly.',
|
||||
name: 'customScopesNotice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
displayOptions: { show: { customScopes: [true] } },
|
||||
},
|
||||
{
|
||||
displayName: 'Enabled Scopes',
|
||||
name: 'enabledScopes',
|
||||
type: 'string',
|
||||
displayOptions: { show: { customScopes: [true] } },
|
||||
default: defaultScopes.join(' '),
|
||||
description: 'Scopes that should be enabled',
|
||||
},
|
||||
// ── End custom scopes block ───────────────────────────────────────────
|
||||
|
||||
{
|
||||
displayName: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'hidden',
|
||||
// Custom scopes: expression toggles between user value and defaults.
|
||||
// Fixed scopes: use the literal defaultScopes string instead.
|
||||
default:
|
||||
'={{$self["customScopes"] ? $self["enabledScopes"] : "' + defaultScopes.join(' ') + '"}}',
|
||||
},
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
**Rules:**
|
||||
- No `authenticate` block — `oAuth2Api` machinery handles Bearer token injection automatically.
|
||||
- No `test` block — the OAuth dance validates the credential.
|
||||
- `defaultScopes` at module level is the single source of truth: it populates both the
|
||||
`enabledScopes` default and the `scope` expression fallback. Update it in one place.
|
||||
- If the service needs a domain / workspace URL for API call construction, add it as a
|
||||
visible `string` field **before** the hidden fields.
|
||||
|
||||
---
|
||||
|
||||
## Step 4 — Register the credential in `package.json`
|
||||
|
||||
File: `packages/nodes-base/package.json`
|
||||
|
||||
Find the `n8n.credentials` array and insert the new entry near other credentials for this
|
||||
service (alphabetical ordering within the service's block):
|
||||
|
||||
```json
|
||||
"dist/credentials/{NODE_NAME}OAuth2Api.credentials.js",
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Step 5 — Update `GENERIC_OAUTH2_CREDENTIALS_WITH_EDITABLE_SCOPE` (custom scopes only)
|
||||
|
||||
**Only do this step when CUSTOM_SCOPES = yes.**
|
||||
|
||||
File: `packages/cli/src/constants.ts`
|
||||
|
||||
Add `'{camelCase}OAuth2Api'` to the `GENERIC_OAUTH2_CREDENTIALS_WITH_EDITABLE_SCOPE`
|
||||
array. Without this, n8n deletes the user's custom scope on OAuth2 reconnect.
|
||||
|
||||
```typescript
|
||||
export const GENERIC_OAUTH2_CREDENTIALS_WITH_EDITABLE_SCOPE = [
|
||||
'oAuth2Api',
|
||||
'googleOAuth2Api',
|
||||
'microsoftOAuth2Api',
|
||||
'highLevelOAuth2Api',
|
||||
'mcpOAuth2Api',
|
||||
'{camelCase}OAuth2Api', // ← add this
|
||||
];
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Step 6 — Update `GenericFunctions.ts`
|
||||
|
||||
### 6a — Standard services (token works directly against the instance URL)
|
||||
|
||||
Add an `else if` branch before the existing `else` fallback:
|
||||
|
||||
```typescript
|
||||
} else if ({versionParam} === '{camelCase}OAuth2') {
|
||||
domain = (await this.getCredentials('{camelCase}OAuth2Api')).{domainField} as string;
|
||||
credentialType = '{camelCase}OAuth2Api';
|
||||
} else {
|
||||
```
|
||||
|
||||
### 6b — Gateway services requiring a workspace/cloud ID lookup
|
||||
|
||||
When the OAuth token is scoped for a gateway URL rather than the direct instance URL
|
||||
(Atlassian's `api.atlassian.com` is the canonical example), add a module-level cache and
|
||||
lookup helper **before** the main request function:
|
||||
|
||||
```typescript
|
||||
// Module-level cache: normalised domain → site/cloud ID
|
||||
export const _cloudIdCache = new Map<string, string>();
|
||||
|
||||
async function getSiteId(
|
||||
this: IHookFunctions | IExecuteFunctions | ILoadOptionsFunctions,
|
||||
credentialType: string,
|
||||
domain: string,
|
||||
): Promise<string> {
|
||||
const normalizedDomain = domain.replace(/\/$/, '');
|
||||
if (_cloudIdCache.has(normalizedDomain)) return _cloudIdCache.get(normalizedDomain)!;
|
||||
|
||||
const resources = (await this.helpers.requestWithAuthentication.call(this, credentialType, {
|
||||
uri: '{ACCESSIBLE_RESOURCES_ENDPOINT}',
|
||||
json: true,
|
||||
})) as Array<{ id: string; url: string }>;
|
||||
|
||||
const site = resources.find((r) => r.url === normalizedDomain);
|
||||
if (!site) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
`No accessible site found for domain: ${domain}. Make sure the domain matches your site URL exactly.`,
|
||||
);
|
||||
}
|
||||
|
||||
_cloudIdCache.set(normalizedDomain, site.id);
|
||||
return site.id;
|
||||
}
|
||||
```
|
||||
|
||||
Then in the main request function:
|
||||
|
||||
```typescript
|
||||
} else if ({versionParam} === '{camelCase}OAuth2') {
|
||||
const rawDomain = (await this.getCredentials('{camelCase}OAuth2Api')).domain as string;
|
||||
credentialType = '{camelCase}OAuth2Api';
|
||||
const siteId = await getSiteId.call(this, credentialType, rawDomain);
|
||||
domain = `{GATEWAY_BASE_URL}/${siteId}`;
|
||||
} else {
|
||||
```
|
||||
|
||||
The existing `uri: \`${domain}/rest${endpoint}\`` construction then produces the correct
|
||||
gateway URL automatically.
|
||||
|
||||
Add `NodeOperationError` to the `n8n-workflow` import if not already present.
|
||||
|
||||
---
|
||||
|
||||
## Step 7 — Update the node file(s)
|
||||
|
||||
### Main node (`*.node.ts`)
|
||||
|
||||
**Credentials array** — add an entry for the new credential type:
|
||||
|
||||
```typescript
|
||||
{
|
||||
name: '{camelCase}OAuth2Api',
|
||||
required: true,
|
||||
displayOptions: { show: { {versionParam}: ['{camelCase}OAuth2'] } },
|
||||
},
|
||||
```
|
||||
|
||||
**Version/auth options** — add to the `{versionParam}` (or equivalent) options list:
|
||||
|
||||
```typescript
|
||||
{ name: '{Display Name} (OAuth2)', value: '{camelCase}OAuth2' },
|
||||
```
|
||||
|
||||
Keep `default` unchanged — existing workflows must not be affected.
|
||||
|
||||
### Trigger node (`*Trigger.node.ts`, if present)
|
||||
|
||||
Same two changes. Preserve any `displayName` label pattern already used by other credential
|
||||
entries in that trigger node's credentials array.
|
||||
|
||||
---
|
||||
|
||||
## Step 8 — Write credential tests
|
||||
|
||||
File: `packages/nodes-base/credentials/test/{NODE_NAME}OAuth2Api.credentials.test.ts`
|
||||
|
||||
Use `ClientOAuth2` from `@n8n/client-oauth2` and `nock` for HTTP mocking. Follow the
|
||||
structure in `MicrosoftTeamsOAuth2Api.credentials.test.ts`.
|
||||
|
||||
Required test cases:
|
||||
1. **Metadata** — name, extends array, `enabledScopes` default, auth URL, token URL,
|
||||
`authQueryParameters` default (if applicable).
|
||||
2. **Default scopes in authorization URI** — call `oauthClient.code.getUri()`, assert each
|
||||
default scope is present.
|
||||
3. **Token retrieval with default scopes** — mock the token endpoint with `nock`, call
|
||||
`oauthClient.code.getToken(...)`, assert `token.data.scope` contains each scope.
|
||||
4. **Custom scopes in authorization URI** _(skip when CUSTOM_SCOPES = no)_.
|
||||
5. **Token retrieval with custom scopes** _(skip when CUSTOM_SCOPES = no)_.
|
||||
6. **Minimal / different scope set** _(skip when CUSTOM_SCOPES = no)_ — assert scopes not
|
||||
in the set are absent from both the URI and token response.
|
||||
|
||||
Lifecycle hooks required:
|
||||
```typescript
|
||||
beforeAll(() => { nock.disableNetConnect(); });
|
||||
afterAll(() => { nock.restore(); });
|
||||
afterEach(() => { nock.cleanAll(); });
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Step 9 — Update `GenericFunctions.test.ts`
|
||||
|
||||
In the credential-routing `describe` block:
|
||||
|
||||
1. If a site-ID cache (`_cloudIdCache`) was added, import it and call
|
||||
`_cloudIdCache.clear()` (or equivalent) in `afterEach`.
|
||||
2. Add/update the OAuth2 routing test case:
|
||||
- **Simple routing**: assert `getCredentials` was called with the correct credential
|
||||
name and `requestWithAuthentication` was called with the correct name and URI.
|
||||
- **Gateway lookup**: mock `requestWithAuthentication` to return the accessible-resources
|
||||
payload on the first call and `{}` on the second. Assert the first call targets the
|
||||
resources endpoint and the second call uses the gateway base URL with the site ID.
|
||||
|
||||
---
|
||||
|
||||
## Step 10 — Verify
|
||||
|
||||
```bash
|
||||
# From packages/nodes-base/
|
||||
pnpm test credentials/test/{NODE_NAME}OAuth2Api.credentials.test.ts
|
||||
pnpm test nodes/{NODE_NAME}/__test__/GenericFunctions.test.ts
|
||||
pnpm typecheck
|
||||
pnpm lint
|
||||
|
||||
# Only when constants.ts was changed:
|
||||
pushd ../cli && pnpm typecheck && popd
|
||||
```
|
||||
|
||||
Fix any type errors before finishing. Never skip `pnpm typecheck`.
|
||||
|
|
@ -1,139 +0,0 @@
|
|||
---
|
||||
name: n8n:protect-endpoints
|
||||
description: Applies n8n's RBAC scope decorators to REST endpoints. Use when creating a new @RestController, adding any @Get/@Post/@Put/@Patch/@Delete route to an existing controller, or reviewing endpoint authorization. Every authenticated endpoint must be gated by @ProjectScope or @GlobalScope.
|
||||
---
|
||||
|
||||
# Protect REST endpoints with RBAC
|
||||
|
||||
**Rule:** every authenticated route on a `@RestController` MUST carry an access-scope decorator. If you add a route without one, the IDOR/permission bypass is on you.
|
||||
|
||||
## Decision
|
||||
|
||||
```
|
||||
URL has :projectId → @ProjectScope('<resource>:<op>')
|
||||
URL has no project → @GlobalScope('<resource>:<op>')
|
||||
skipAuth: true → no decorator + comment explaining alternate auth
|
||||
```
|
||||
|
||||
`@ProjectScope` succeeds if the user has the scope **globally OR in the project named in the URL**. `@GlobalScope` ignores project relations entirely.
|
||||
|
||||
Both decorators come from `@n8n/decorators`. The middleware lives in `packages/cli/src/controller.registry.ts` (`createScopedMiddleware`) and resolves access via `userHasScopes` in `packages/cli/src/permissions.ee/check-access.ts`.
|
||||
|
||||
## Apply the decorator
|
||||
|
||||
```ts
|
||||
import { Get, Post, ProjectScope, RestController } from '@n8n/decorators';
|
||||
|
||||
@RestController('/projects/:projectId/widgets')
|
||||
export class WidgetsController {
|
||||
@Post('/')
|
||||
@ProjectScope('widget:create') // create
|
||||
async create(...) { ... }
|
||||
|
||||
@Get('/:widgetId')
|
||||
@ProjectScope('widget:read') // read one
|
||||
async get(...) { ... }
|
||||
|
||||
@Get('/')
|
||||
@ProjectScope('widget:list') // list
|
||||
async list(...) { ... }
|
||||
|
||||
@Patch('/:widgetId')
|
||||
@ProjectScope('widget:update') // update
|
||||
async update(...) { ... }
|
||||
|
||||
@Delete('/:widgetId')
|
||||
@ProjectScope('widget:delete') // delete
|
||||
async delete(...) { ... }
|
||||
}
|
||||
```
|
||||
|
||||
Conventions:
|
||||
- One decorator per route, placed directly under the HTTP-method decorator.
|
||||
- Use the most specific scope that fits. Reuse `*:update` for state-changing actions like `publish`/`unpublish`/`build` unless the resource needs to gate them separately (see `workflow:publish` for the precedent).
|
||||
- A route that lacks `:projectId` but is not a genuinely global-only operation is usually a design smell — flag it.
|
||||
|
||||
## When the scope doesn't exist yet
|
||||
|
||||
Add the resource and ops in `packages/@n8n/permissions/`:
|
||||
|
||||
1. **`src/constants.ee.ts`** — add to `RESOURCES` (alphabetical):
|
||||
```ts
|
||||
widget: [...DEFAULT_OPERATIONS, 'execute'] as const,
|
||||
```
|
||||
The `Scope` union (`<resource>:<op>` template-literal type) auto-derives.
|
||||
2. **`src/scope-information.ts`** — add a display name + description per scope.
|
||||
3. **`src/roles/scopes/project-scopes.ee.ts`** — add to project roles. Match the `workflow` precedent unless product says otherwise:
|
||||
- `REGULAR_PROJECT_ADMIN_SCOPES`, `PERSONAL_PROJECT_OWNER_SCOPES`, `PROJECT_EDITOR_SCOPES` → all CRUDL+execute scopes.
|
||||
- `PROJECT_VIEWER_SCOPES` → read/list/execute only.
|
||||
- `PROJECT_CHAT_USER_SCOPES` → execute only (if applicable).
|
||||
4. **`src/roles/scopes/global-scopes.ee.ts`** — add to `GLOBAL_OWNER_SCOPES` (admin inherits via `concat()`). Do **not** add to member/chat-user globals — they get scopes via project relations.
|
||||
5. **Personal-space publishing**: if you add a `<resource>:publish` scope, also append it to `PERSONAL_SPACE_PUBLISHING_SETTING.scopes` in `constants.ee.ts` so personal-owner gating matches `workflow:publish`.
|
||||
6. **Frontend wiring** — three files in the editor; skipping any of them means the new scopes will not appear in the project-role configuration UI:
|
||||
- `packages/frontend/editor-ui/src/app/stores/rbac.store.ts` — add `<resource>: {}` to `scopesByResourceId` (typecheck will fail otherwise).
|
||||
- `packages/frontend/editor-ui/src/features/project-roles/projectRoleScopes.ts` — add the resource to `UI_OPERATIONS` (operations to render in the permissions matrix, in display order) **and** to `SCOPE_TYPES` (the order the resource group appears on the page).
|
||||
- `packages/frontend/@n8n/i18n/src/locales/en.json` — add `projectRoles.<resource>:<op>` (column label) and `projectRoles.<resource>:<op>.tooltip` (hover description) for every op, plus `projectRoles.type.<resource>` (the group header).
|
||||
7. **Snapshot** — update `packages/@n8n/permissions/src/__tests__/__snapshots__/scope-information.test.ts.snap` to include the new `<resource>:*` entries.
|
||||
|
||||
No DB migration needed — `AuthRolesService.init()` syncs scopes/roles on every startup. Custom team roles created in the UI are **not** auto-updated; mention this in the PR description.
|
||||
|
||||
## Public / unauthenticated routes
|
||||
|
||||
`{ skipAuth: true }` skips the auth middleware → `req.user` is undefined → adding `@ProjectScope` would 401 every call. Public routes (third-party webhooks, signed callbacks) must:
|
||||
|
||||
1. **Omit the scope decorator.**
|
||||
2. Authenticate via signature/HMAC verification inside the handler (or another route-specific mechanism).
|
||||
3. Carry a comment explaining why no scope is applied, so the next reviewer doesn't try to "fix" it.
|
||||
|
||||
Example:
|
||||
```ts
|
||||
// Third-party webhook callback: do not add @ProjectScope. Auth happens
|
||||
// via per-platform signature verification inside webhookHandler, and
|
||||
// :projectId is unused in the (agentId, platform) lookup.
|
||||
@Post('/:agentId/webhooks/:platform', { skipAuth: true, allowBots: true })
|
||||
async handleWebhook(...) { ... }
|
||||
```
|
||||
|
||||
## Verify with a route-metadata test
|
||||
|
||||
Add a regression test that fails when a future route is added without a scope. Iterate every route on the controller via `ControllerRegistryMetadata` and assert the gate.
|
||||
|
||||
```ts
|
||||
import { ControllerRegistryMetadata } from '@n8n/decorators';
|
||||
import { Container } from '@n8n/di';
|
||||
import { WidgetsController } from '../widgets.controller';
|
||||
|
||||
const UNAUTHENTICATED_HANDLERS = new Set<string>(); // add public handler names here
|
||||
|
||||
const metadata = Container.get(ControllerRegistryMetadata).getControllerMetadata(
|
||||
WidgetsController as never,
|
||||
);
|
||||
const routeCases = Array.from(metadata.routes.entries()).map(([handlerName, route]) => ({
|
||||
handlerName, route,
|
||||
}));
|
||||
|
||||
describe('WidgetsController route access scopes', () => {
|
||||
it.each(routeCases)(
|
||||
'$handlerName is gated by a project-scoped widget:* check',
|
||||
({ handlerName, route }) => {
|
||||
if (UNAUTHENTICATED_HANDLERS.has(handlerName)) {
|
||||
expect(route.accessScope).toBeUndefined();
|
||||
expect(route.skipAuth).toBe(true);
|
||||
return;
|
||||
}
|
||||
expect(route.accessScope).toBeDefined();
|
||||
expect(route.accessScope?.globalOnly).toBe(false);
|
||||
expect(route.accessScope?.scope.startsWith('widget:')).toBe(true);
|
||||
},
|
||||
);
|
||||
});
|
||||
```
|
||||
|
||||
## Defense in depth (still required)
|
||||
|
||||
Decorator alone is not enough when handlers leak data via downstream calls. Service/repository methods should still **filter by `projectId`** (or user-scoped helpers like `findByUser`). The decorator gates *who can call this URL*; the service gates *what they can read*. Both, always.
|
||||
|
||||
## Reference patterns
|
||||
|
||||
- Project-scoped CRUD: `packages/cli/src/workflows/workflows.controller.ts`, `packages/cli/src/credentials/credentials.controller.ts`, `packages/cli/src/modules/data-table/data-table.controller.ts`.
|
||||
- Mixed global + project: `packages/cli/src/controllers/project.controller.ts`.
|
||||
|
|
@ -1,135 +0,0 @@
|
|||
---
|
||||
name: n8n:reproduce-bug
|
||||
description: Reproduce a bug from a Linear ticket with a failing test. Expects the full ticket context (title, description, comments) to be provided as input.
|
||||
---
|
||||
|
||||
# Bug Reproduction Framework
|
||||
|
||||
Given a Linear ticket context ($ARGUMENTS), systematically reproduce the bug
|
||||
with a failing regression test.
|
||||
|
||||
## Step 1: Parse Signals
|
||||
|
||||
Extract the following from the provided ticket context:
|
||||
- **Error message / stack trace** (if provided)
|
||||
- **Reproduction steps** (if provided)
|
||||
- **Workflow JSON** (if attached)
|
||||
- **Affected area** (node, execution engine, editor, API, config, etc.)
|
||||
- **Version where it broke / last working version**
|
||||
|
||||
|
||||
## Step 2: Route to Test Strategy
|
||||
|
||||
Based on the affected area, pick the test layer and pattern:
|
||||
|
||||
| Area | Test Layer | Pattern | Key Location |
|
||||
|------|-----------|---------|--------------|
|
||||
| Node operation | Jest unit | NodeTestHarness + nock | `packages/nodes-base/nodes/*/test/` |
|
||||
| Node credential | Jest unit | jest-mock-extended | `packages/nodes-base/nodes/*/test/` |
|
||||
| Trigger webhook | Jest unit | mock IHookFunctions + jest.mock GenericFunctions | `packages/nodes-base/nodes/*/test/` |
|
||||
| Binary data | Jest unit | NodeTestHarness assertBinaryData | `packages/core/nodes-testing/` |
|
||||
| Execution engine | Jest integration | WorkflowRunner + DI container | `packages/cli/src/__tests__/` |
|
||||
| CLI / API | Jest integration | setupTestServer + supertest | `packages/cli/test/integration/` |
|
||||
| Config | Jest unit | GlobalConfig + Container | `packages/@n8n/config/src/__tests__/` |
|
||||
| Editor UI | Vitest | Vue Test Utils + Pinia | `packages/frontend/editor-ui/src/**/__tests__/` |
|
||||
| E2E / Canvas | Playwright | Test containers + composables | `packages/testing/playwright/` |
|
||||
|
||||
## Step 3: Locate Source Files
|
||||
|
||||
Find the source code for the affected area:
|
||||
1. Search for the node/service/component mentioned in the ticket
|
||||
2. Find the GenericFunctions file (common bug location for nodes)
|
||||
3. Check for existing test files in the same area
|
||||
4. Look at recent git history on affected files (`git log --oneline -10 -- <path>`)
|
||||
|
||||
## Step 4: Trace the Code Path
|
||||
|
||||
Read the source code and trace the execution path that triggers the bug:
|
||||
- Follow the call chain from entry point to the failure
|
||||
- Identify the specific line(s) where the bug manifests
|
||||
- Note any error handling (or lack thereof) around the bug
|
||||
|
||||
## Step 5: Form Hypothesis
|
||||
|
||||
State a clear, testable hypothesis:
|
||||
- "When [input/condition], the code does [wrong thing] because [root cause]"
|
||||
- Identify the exact line(s) that need to change
|
||||
- Predict what the test output will show
|
||||
|
||||
## Step 6: Find Test Patterns
|
||||
|
||||
Look for existing tests in the same area:
|
||||
1. Check `test/` directories near the affected code
|
||||
2. Identify which mock/setup patterns they use
|
||||
3. Use the same patterns for consistency
|
||||
4. If no tests exist, find the closest similar node/service tests as a template
|
||||
|
||||
## Step 7: Write Failing Test
|
||||
|
||||
Write a regression test that:
|
||||
- Uses the patterns found in Step 6
|
||||
- Targets the specific hypothesis from Step 5
|
||||
- Includes a comment referencing the ticket ID
|
||||
- Asserts the CORRECT behavior (test will fail on current code)
|
||||
- Also includes a "happy path" test to prove the setup works
|
||||
|
||||
## Step 8: Run and Score
|
||||
|
||||
Run the test from the package directory (e.g., `cd packages/nodes-base && pnpm test <file>`).
|
||||
|
||||
Classify the result:
|
||||
|
||||
| Confidence | Criteria | Output |
|
||||
|------------|----------|--------|
|
||||
| **CONFIRMED** | Test fails consistently, failure matches hypothesis | Reproduction Report |
|
||||
| **LIKELY** | Test fails but failure mode differs slightly | Report + caveat |
|
||||
| **UNCONFIRMED** | Cannot trigger the failure | Report: what was tried |
|
||||
| **SKIPPED** | Hit a hard bailout trigger | Report: why skipped |
|
||||
| **ALREADY_FIXED** | Bug no longer reproduces on current code | Report: when fixed |
|
||||
|
||||
## Step 9: Iterate or Bail
|
||||
|
||||
If UNCONFIRMED after first attempt:
|
||||
- Revisit hypothesis — re-read the code path
|
||||
- Try a different test approach or layer
|
||||
- Maximum 3 attempts before declaring UNCONFIRMED
|
||||
|
||||
**Hard bailout triggers** (stop immediately):
|
||||
- Requires real third-party API credentials
|
||||
- Race condition / timing-dependent
|
||||
- Requires specific cloud/enterprise infrastructure
|
||||
- Requires manual UI interaction that can't be scripted
|
||||
|
||||
## Output: Reproduction Report
|
||||
|
||||
Present findings in this format:
|
||||
|
||||
---
|
||||
|
||||
**Ticket:** [ID] — [title]
|
||||
**Confidence:** [CONFIRMED | LIKELY | UNCONFIRMED | SKIPPED | ALREADY_FIXED]
|
||||
|
||||
### Root Cause
|
||||
[1-2 sentences explaining the bug mechanism]
|
||||
|
||||
### Location
|
||||
| File | Lines | Issue |
|
||||
|------|-------|-------|
|
||||
| `path/to/file.ts` | XX-YY | Description of the problem |
|
||||
|
||||
### Failing Test
|
||||
`path/to/test/file.test.ts` — X/Y tests fail:
|
||||
1. `test name` — [failure description]
|
||||
|
||||
### Fix Hint
|
||||
[Pseudocode or description of the fix approach]
|
||||
|
||||
---
|
||||
|
||||
## Important
|
||||
|
||||
- **DO NOT fix the bug** — only reproduce it with a failing test
|
||||
- **Leave test files in place** as evidence (don't commit unless asked)
|
||||
- **Run tests from the package directory** (e.g., `pushd packages/nodes-base && pnpm test <file> && popd`)
|
||||
- **Always redirect build output**: `pnpm build > build.log 2>&1`
|
||||
- **DO NOT look at existing fix PRs** — the goal is to reproduce from signals alone
|
||||
|
|
@ -1,76 +0,0 @@
|
|||
---
|
||||
name: n8n:setup-mcps
|
||||
description: >-
|
||||
Configure MCP servers for n8n development. Use when the user says /setup-mcps
|
||||
or asks to set up MCP servers for n8n.
|
||||
---
|
||||
|
||||
# MCP Setup for n8n Development
|
||||
|
||||
Configure commonly used MCP servers for n8n engineers.
|
||||
|
||||
## Instructions
|
||||
|
||||
1. First, check which MCPs are already configured by running:
|
||||
```bash
|
||||
claude mcp list
|
||||
```
|
||||
Parse the output and match by **URL/command**, not server name (users may have
|
||||
used different names). The URLs to check for:
|
||||
- Linear: `mcp.linear.app`
|
||||
- Notion: `mcp.notion.com`
|
||||
- Context7: `ctx7` or `context7-mcp`
|
||||
- Figma: `mcp.figma.com`
|
||||
|
||||
Skip any MCP whose URL/command is already present (regardless of scope or name).
|
||||
|
||||
2. Present the MCP selection menu using `AskUserQuestion` with `multiSelect: true`.
|
||||
Only show MCPs that are **not** already configured. If all are already configured,
|
||||
inform the user and skip the menu.
|
||||
|
||||
| Option | Label | Description |
|
||||
|--------|-------|-------------|
|
||||
| Linear | `Linear` | Linear ticket management (HTTP, OAuth — opens browser to authenticate) |
|
||||
| Notion | `Notion` | Notion workspace integration (HTTP, OAuth — opens browser to authenticate) |
|
||||
| Context7 | `Context7` | Library documentation lookup (OAuth setup via CLI) |
|
||||
| Figma | `Figma` | Figma design integration (HTTP, OAuth — opens browser to authenticate) |
|
||||
|
||||
3. Process each selected MCP **one at a time** in a loop. For each MCP:
|
||||
a. Ask the user via `AskUserQuestion`: "Where should **{MCP name}** be installed?"
|
||||
- **user** (default, recommended) — available in all projects
|
||||
- **local** — only in this project
|
||||
b. Run the install command for that MCP with the chosen scope
|
||||
c. Then move to the next MCP and ask again
|
||||
|
||||
Do NOT batch the scope question — ask separately for each MCP.
|
||||
Do NOT offer project scope — it modifies `.claude/settings.json` which is tracked in git.
|
||||
|
||||
Commands per MCP:
|
||||
|
||||
### Linear
|
||||
```bash
|
||||
claude mcp add -s {scope} linear-server --transport http https://mcp.linear.app/mcp
|
||||
```
|
||||
After adding, tell the user to run `/mcp` in their next session to authenticate.
|
||||
|
||||
### Notion
|
||||
```bash
|
||||
claude mcp add -s {scope} notion --transport http https://mcp.notion.com/mcp
|
||||
```
|
||||
After adding, tell the user to run `/mcp` in their next session to authenticate.
|
||||
|
||||
### Context7
|
||||
Tell the user to run this command themselves (it handles auth via OAuth automatically):
|
||||
|
||||
```
|
||||
npx ctx7 setup --claude
|
||||
```
|
||||
|
||||
### Figma
|
||||
```bash
|
||||
claude mcp add -s {scope} figma --transport http https://mcp.figma.com/mcp
|
||||
```
|
||||
After adding, tell the user to run `/mcp` in their next session to authenticate.
|
||||
|
||||
4. After running the commands, confirm which MCPs were configured and note any
|
||||
manual steps remaining (authentication via `/mcp`, Context7 setup).
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
---
|
||||
name: n8n:spec-driven-development
|
||||
description: Keeps implementation and specs in sync. Use when working on a feature that has a spec in .claude/specs/, when the user says /spec, or when starting implementation of a documented feature. Also use when the user asks to verify implementation against a spec or update a spec after changes.
|
||||
---
|
||||
|
||||
# Spec-Driven Development
|
||||
|
||||
Specs live in `.claude/specs/`. They are the source of truth for architectural
|
||||
decisions, API contracts, and implementation scope. Implementation and specs
|
||||
must stay in sync — neither leads exclusively.
|
||||
|
||||
## Core Loop
|
||||
|
||||
```
|
||||
Read spec → Implement → Verify alignment → Update spec or code → Repeat
|
||||
```
|
||||
|
||||
## Before Starting Work
|
||||
|
||||
1. **Find the spec.** Search `.claude/specs/` for files matching the feature:
|
||||
|
||||
```bash
|
||||
ls .claude/specs/
|
||||
```
|
||||
|
||||
2. **Read the full spec.** Understand scope, decisions, API contracts, and
|
||||
open questions before writing code.
|
||||
|
||||
3. **If no spec exists** and the task is non-trivial (new module, new API,
|
||||
architectural change), ask the user whether to create one first.
|
||||
|
||||
## During Implementation
|
||||
|
||||
- **Reference spec decisions** — don't re-decide what the spec already settled.
|
||||
- **When you diverge from the spec** (better approach found, user requested
|
||||
change, constraint discovered), update the spec immediately in the same
|
||||
session. Don't leave spec and code out of sync.
|
||||
- **Tick off TODO checkboxes** (`- [ ]` → `- [x]`) as items are completed.
|
||||
- **Strike through or annotate** items that were deliberately skipped or
|
||||
replaced, with a brief reason:
|
||||
```markdown
|
||||
- [x] ~~OpenRouter proxy~~ → Direct execution: nodes call OpenRouter directly
|
||||
```
|
||||
|
||||
## After Completing Work
|
||||
|
||||
Run a spec verification pass:
|
||||
|
||||
1. **Re-read the spec** alongside the implementation.
|
||||
2. **Check each section:**
|
||||
- Do API endpoints in spec match the controller?
|
||||
- Do config/env vars in spec match the config class?
|
||||
- Does the module structure in spec match the actual file tree?
|
||||
- Do type definitions in spec match `@n8n/api-types`?
|
||||
- Are all TODO items correctly checked/unchecked?
|
||||
3. **Update the spec** for any drift found. Common drift:
|
||||
- New files added that aren't listed in the structure section
|
||||
- API response shapes changed during implementation
|
||||
- Config defaults adjusted
|
||||
- Architectural decisions refined
|
||||
4. **Flag unresolved gaps** to the user — things the spec promises but
|
||||
implementation doesn't deliver yet (acceptable for MVP, but should be noted).
|
||||
|
||||
## Spec File Conventions
|
||||
|
||||
- One or more markdown files per feature in `.claude/specs/`.
|
||||
- Keep specs concise. Use tables for mappings, code blocks for shapes.
|
||||
- Use `## Implementation TODO` with checkboxes to track progress.
|
||||
- Split into multiple files when it helps (e.g. separate backend/frontend),
|
||||
but don't enforce a rigid naming scheme.
|
||||
|
||||
## When the User Asks to "Self-Review" or "Verify Against Spec"
|
||||
|
||||
1. Read all relevant specs.
|
||||
2. Read all implementation files.
|
||||
3. Produce a structured comparison:
|
||||
- **Aligned**: items where spec and code match
|
||||
- **Drift**: items where they diverge (fix immediately)
|
||||
- **Gaps**: spec items not yet implemented (note as future work)
|
||||
4. Fix drift, update specs, report gaps to the user.
|
||||
|
|
@ -1,44 +0,0 @@
|
|||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(git log:*)",
|
||||
"Bash(git show:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(ls:*)",
|
||||
"Bash(pnpm build)",
|
||||
"Bash(pnpm lint:*)",
|
||||
"Bash(pnpm test:*)",
|
||||
"Bash(pnpm typecheck:*)",
|
||||
"Bash(popd)",
|
||||
"Bash(pushd:*)",
|
||||
"Bash(mkdir -p .claude/plans)",
|
||||
"Write(.claude/plans/*)"
|
||||
]
|
||||
},
|
||||
"hooks": {
|
||||
"PostToolUse": [
|
||||
{
|
||||
"matcher": "Skill",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "node .claude/plugins/n8n/scripts/track-skill-usage.mjs",
|
||||
"timeout": 10,
|
||||
"async": true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"extraKnownMarketplaces": {
|
||||
"n8n": {
|
||||
"source": {
|
||||
"source": "directory",
|
||||
"path": "./.claude/plugins/n8n"
|
||||
}
|
||||
}
|
||||
},
|
||||
"enabledPlugins": {
|
||||
"n8n@n8n": true
|
||||
}
|
||||
}
|
||||
|
|
@ -1 +0,0 @@
|
|||
plugins/n8n/skills
|
||||
|
|
@ -1,545 +0,0 @@
|
|||
{
|
||||
"version": 1,
|
||||
"generated": "2026-05-12T09:37:31.489Z",
|
||||
"totalViolations": 82,
|
||||
"violations": {
|
||||
"packages/@n8n/ai-workflow-builder.ee/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 73,
|
||||
"message": "langsmith@^0.4.6 should use \"catalog:\" (exists in pnpm-workspace.yaml)",
|
||||
"hash": "6ee5e003d795"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 61,
|
||||
"message": "@mozilla/readability appears in 5 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "d2120f012c93"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 70,
|
||||
"message": "csv-parse appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "94f80b083b76"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 71,
|
||||
"message": "jsdom appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9c770d66baf2"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 77,
|
||||
"message": "turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "85c311d87491"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 83,
|
||||
"message": "@types/turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "407c8d1b3428"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/json-schema-to-zod/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 63,
|
||||
"message": "zod@^3.25.76 should use \"catalog:\" (exists in pnpm-workspace.yaml)",
|
||||
"hash": "0e18482e8781"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/nodes-langchain/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 292,
|
||||
"message": "openai@^6.34.0 should use \"catalog:\" (exists in pnpm-workspace.yaml)",
|
||||
"hash": "3c1f53f0afe3"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 299,
|
||||
"message": "tmp-promise appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "88d67e2ef747"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 259,
|
||||
"message": "@mozilla/readability appears in 5 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "69d6fa7e46f9"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 274,
|
||||
"message": "cheerio appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "8cd029bb871e"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 284,
|
||||
"message": "jsdom appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "26f20ebea4b1"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 289,
|
||||
"message": "mongodb appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "46cb48884e22"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 293,
|
||||
"message": "pdf-parse appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "0c7d44a9c2e4"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/tournament/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 44,
|
||||
"message": "@types/node@^18.13.0 should use \"catalog:\" (exists in pnpm-workspace.yaml)",
|
||||
"hash": "6368b5d3b924"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 52,
|
||||
"message": "typescript@^5.0.0 should use \"catalog:\" (exists in pnpm-workspace.yaml)",
|
||||
"hash": "f668021a144e"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 55,
|
||||
"message": "ast-types appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "27edcbb2b4f8"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 56,
|
||||
"message": "esprima-next appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "75058f9a4d30"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 57,
|
||||
"message": "recast appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "5f2b50fef19d"
|
||||
}
|
||||
],
|
||||
"packages/frontend/@n8n/chat/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 56,
|
||||
"message": "unplugin-icons@^0.19.0 should use \"catalog:frontend\" (exists in pnpm-workspace.yaml [frontend])",
|
||||
"hash": "a0d24d761026"
|
||||
}
|
||||
],
|
||||
"packages/frontend/@n8n/design-system/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 73,
|
||||
"message": "@vueuse/core@* should use \"catalog:frontend\" (exists in pnpm-workspace.yaml [frontend])",
|
||||
"hash": "237e9d17c4ba"
|
||||
}
|
||||
],
|
||||
"packages/cli/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 98,
|
||||
"message": "@ai-sdk/anthropic appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "1e3686e1923b"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 139,
|
||||
"message": "@opentelemetry/sdk-trace-base appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "1cf7f6bcf5d1"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 140,
|
||||
"message": "@opentelemetry/sdk-trace-node appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "a3dad0b8dc21"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 150,
|
||||
"message": "change-case appears in 5 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "949e802528f7"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 202,
|
||||
"message": "prettier appears in 3 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "3cab98902302"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 209,
|
||||
"message": "semver appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "5b7e9b03fb10"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 217,
|
||||
"message": "undici appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "91c29775e961"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 220,
|
||||
"message": "ws appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "cd07242e8163"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 75,
|
||||
"message": "@types/psl appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "6e62e0076b0a"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/agents/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 28,
|
||||
"message": "@ai-sdk/anthropic appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "b58f03d0d5c1"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 50,
|
||||
"message": "@opentelemetry/sdk-trace-base appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "c5c495ac3508"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 51,
|
||||
"message": "@opentelemetry/sdk-trace-node appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "a77ced903cdf"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/instance-ai/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 80,
|
||||
"message": "@ai-sdk/anthropic appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "5b2153508e47"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 86,
|
||||
"message": "@types/psl appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "56dabb51b433"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 56,
|
||||
"message": "@mozilla/readability appears in 5 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "8fa6b9a8fc91"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 64,
|
||||
"message": "csv-parse appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "8f082fc2e8b6"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 71,
|
||||
"message": "turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9a9d97065952"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 87,
|
||||
"message": "@types/turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "12e346c47b39"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 50,
|
||||
"message": "@joplin/turndown-plugin-gfm appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "a3cf1504b5c2"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 68,
|
||||
"message": "pdf-parse appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "283fa9114c03"
|
||||
}
|
||||
],
|
||||
"packages/node-dev/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 47,
|
||||
"message": "change-case appears in 5 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "6988b9f58c92"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 46,
|
||||
"message": "@oclif/core appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "da9b64834300"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 53,
|
||||
"message": "tmp-promise appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "632a744e397e"
|
||||
}
|
||||
],
|
||||
"packages/nodes-base/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 911,
|
||||
"message": "change-case appears in 5 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "2d1fab7a5b05"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 961,
|
||||
"message": "semver appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "2daf37aa14e4"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 966,
|
||||
"message": "tmp-promise appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "3f93c404ae9c"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 900,
|
||||
"message": "@mozilla/readability appears in 5 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "ca4ac788adc6"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 912,
|
||||
"message": "cheerio appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "1a1b5bbc50c9"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 915,
|
||||
"message": "csv-parse appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "781db4a1e068"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 917,
|
||||
"message": "eventsource appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9795e6c6d9e9"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 930,
|
||||
"message": "jsdom appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "02341f2b5e3e"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 941,
|
||||
"message": "mongodb appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "f688907d087a"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 892,
|
||||
"message": "eslint-plugin-n8n-nodes-base appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "ac254baa61f9"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/node-cli/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 52,
|
||||
"message": "change-case appears in 5 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "da74ed210d07"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 59,
|
||||
"message": "prettier appears in 3 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "188baf266f61"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 51,
|
||||
"message": "@oclif/core appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9711a9b00bf9"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 55,
|
||||
"message": "eslint-plugin-n8n-nodes-base appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "6a9e12780943"
|
||||
}
|
||||
],
|
||||
"packages/frontend/editor-ui/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 69,
|
||||
"message": "change-case appears in 5 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "bd9a2eeb072b"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 90,
|
||||
"message": "prettier appears in 3 packages with 3 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9e9c7ec09a0b"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 92,
|
||||
"message": "semver appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "d8c606e42c92"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 77,
|
||||
"message": "esprima-next appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "62156c2613b2"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/scan-community-package/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 20,
|
||||
"message": "semver appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "ac0e4301d694"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/ai-utilities/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 69,
|
||||
"message": "undici appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "c14cd05614e8"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 65,
|
||||
"message": "tmp-promise appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "884a45bdbcf2"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 72,
|
||||
"message": "n8n-workflow appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "ea4fbfff30ba"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/mcp-browser/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 36,
|
||||
"message": "ws appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "9650c1b55f3c"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 28,
|
||||
"message": "@mozilla/readability appears in 5 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "0c97891a24f4"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 30,
|
||||
"message": "jsdom appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "8466b03b1044"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 35,
|
||||
"message": "turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "f23a9d3d7aa2"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 42,
|
||||
"message": "@types/turndown appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "3f9e46e56803"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 26,
|
||||
"message": "@joplin/turndown-plugin-gfm appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "743e3a7dbb32"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/task-runner/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 50,
|
||||
"message": "ws appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "51cb5069f382"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/benchmark/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 36,
|
||||
"message": "@oclif/core appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "67f9d81d9528"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/cli/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 74,
|
||||
"message": "@oclif/core appears in 4 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "733c3960022e"
|
||||
}
|
||||
],
|
||||
"packages/workflow/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 58,
|
||||
"message": "ast-types appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "1c7d7cf0b0fe"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 60,
|
||||
"message": "esprima-next appears in 3 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "627a716b5d23"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 68,
|
||||
"message": "recast appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "b660317b5f6f"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/computer-use/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 47,
|
||||
"message": "eventsource appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "f50c1eee2ed6"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/eslint-plugin-community-nodes/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 47,
|
||||
"message": "n8n-workflow appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "c5830b76ff8e"
|
||||
}
|
||||
],
|
||||
"packages/@n8n/stylelint-config/package.json": [
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 29,
|
||||
"message": "stylelint appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "955f3fe044c7"
|
||||
},
|
||||
{
|
||||
"rule": "catalog-violations",
|
||||
"line": 45,
|
||||
"message": "stylelint appears in 2 packages with 2 different versions — add to pnpm-workspace.yaml catalog",
|
||||
"hash": "955f3fe044c7"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
@ -1,15 +1,9 @@
|
|||
ARG NODE_VERSION=24
|
||||
FROM n8nio/base:22
|
||||
|
||||
FROM node:${NODE_VERSION}-alpine
|
||||
|
||||
ARG NODE_VERSION
|
||||
|
||||
RUN apk add --no-cache \
|
||||
openssh sudo shadow bash libc-utils \
|
||||
git openssl graphicsmagick tini tzdata ca-certificates libc6-compat
|
||||
RUN apk add --no-cache --update openssh sudo shadow bash
|
||||
RUN echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/node && chmod 0440 /etc/sudoers.d/node
|
||||
RUN mkdir /workspaces && chown node:node /workspaces
|
||||
RUN corepack enable
|
||||
RUN npm install -g pnpm
|
||||
|
||||
USER node
|
||||
RUN mkdir -p ~/.pnpm-store && pnpm config set store-dir ~/.pnpm-store --global
|
||||
|
|
|
|||
|
|
@ -1,41 +1,21 @@
|
|||
# Whitelist approach: ignore everything, then allow only what Docker builds need
|
||||
# This reduces build context from ~900MB to just what's required
|
||||
|
||||
# Ignore everything first
|
||||
*
|
||||
|
||||
# === n8n main image (docker/images/n8n/Dockerfile) ===
|
||||
!compiled
|
||||
!compiled/**
|
||||
!THIRD_PARTY_LICENSES.md
|
||||
|
||||
# === runners image (docker/images/runners/Dockerfile + Dockerfile.distroless) ===
|
||||
!dist
|
||||
!dist/task-runner-javascript
|
||||
!dist/task-runner-javascript/**
|
||||
!packages
|
||||
!packages/@n8n
|
||||
!packages/@n8n/task-runner-python
|
||||
!packages/@n8n/task-runner-python/**
|
||||
|
||||
# === Docker build files (entrypoints, configs) ===
|
||||
!docker
|
||||
!docker/images
|
||||
!docker/images/n8n
|
||||
!docker/images/n8n/docker-entrypoint.sh
|
||||
!docker/images/runners
|
||||
!docker/images/runners/n8n-task-runners.json
|
||||
|
||||
# === benchmark image (packages/@n8n/benchmark/Dockerfile) ===
|
||||
!package.json
|
||||
!pnpm-lock.yaml
|
||||
!pnpm-workspace.yaml
|
||||
!patches
|
||||
!patches/**
|
||||
!scripts
|
||||
!scripts/**
|
||||
!packages/@n8n/benchmark
|
||||
!packages/@n8n/benchmark/**
|
||||
!packages/@n8n/typescript-config
|
||||
!packages/@n8n/typescript-config/**
|
||||
|
||||
# We want to include the THIRD_PARTY_LICENSES.md file in the Docker image,
|
||||
# but not other .md files
|
||||
**/*.md
|
||||
!**/THIRD_PARTY_LICENSES.md
|
||||
**/.env
|
||||
.cache
|
||||
assets
|
||||
node_modules
|
||||
packages/node-dev
|
||||
packages/**/node_modules
|
||||
packages/**/dist
|
||||
packages/**/.turbo
|
||||
packages/**/*.test.*
|
||||
.git
|
||||
.github
|
||||
!.github/scripts
|
||||
*.tsbuildinfo
|
||||
docker/compose
|
||||
docker/**/Dockerfile
|
||||
.vscode
|
||||
packages/testing
|
||||
|
|
@ -1,57 +0,0 @@
|
|||
# =============================================================================
|
||||
# n8n local development — minimal environment variables
|
||||
# =============================================================================
|
||||
# This is a minimal example covering some local environment variables for development.
|
||||
# Many more variables exist — search for @Env() decorators in the codebase.
|
||||
# Most of them already have default values, so you only need to fill in the ones you need to change.
|
||||
#
|
||||
# Usage (run from the repo root):
|
||||
# 1. Copy this file: cp .env.local.example .env.local
|
||||
# 2. Fill in the values below
|
||||
# 3. Prefix any dev command with dotenvx, for example:
|
||||
# pnpm exec dotenvx run -f .env.local -- pnpm dev:be
|
||||
#
|
||||
# Note: dotenvx supports variable expansion (e.g. $HOME) but not shell
|
||||
# tilde expansion. Use $HOME instead of ~ for paths.
|
||||
# =============================================================================
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Local data folder
|
||||
# Source: packages/@n8n/config/src/utils/utils.ts
|
||||
# -----------------------------------------------------------------------------
|
||||
N8N_USER_FOLDER=
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# License
|
||||
# Source: packages/@n8n/config/src/configs/license.config.ts
|
||||
# -----------------------------------------------------------------------------
|
||||
# Tenant identifier for the license SDK (for example, self-hosted, sandbox, embed, cloud).
|
||||
N8N_LICENSE_TENANT_ID=
|
||||
# Activation key used to activate or upgrade the instance license.
|
||||
N8N_LICENSE_ACTIVATION_KEY=
|
||||
# Ephemeral license certificate.
|
||||
N8N_LICENSE_CERT=
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# AI
|
||||
# Source: packages/@n8n/config/src/configs/ai.config.ts
|
||||
# packages/@n8n/config/src/configs/ai-assistant.config.ts
|
||||
# packages/@n8n/config/src/configs/ai-builder.config.ts
|
||||
# -----------------------------------------------------------------------------
|
||||
# Whether AI features (such as AI nodes and AI assistant) are enabled globally.
|
||||
N8N_AI_ENABLED=
|
||||
# Base URL of the AI assistant service.
|
||||
# When set, requests are sent to this URL instead of the default provider endpoint.
|
||||
N8N_AI_ASSISTANT_BASE_URL=
|
||||
# API key for the Anthropic (Claude) provider used by the AI workflow builder.
|
||||
# When set, enables AI-powered workflow and node building.
|
||||
N8N_AI_ANTHROPIC_KEY=
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# LangSmith tracing (optional)
|
||||
# Not an n8n config var — read directly by the LangChain SDK.
|
||||
# See: https://docs.smith.langchain.com/
|
||||
# -----------------------------------------------------------------------------
|
||||
LANGSMITH_ENDPOINT=
|
||||
LANGSMITH_PROJECT=
|
||||
LANGSMITH_TRACING=
|
||||
9
.github/CODEOWNERS
vendored
9
.github/CODEOWNERS
vendored
|
|
@ -1,5 +1,6 @@
|
|||
packages/@n8n/db/src/migrations/ @n8n-io/migrations-review
|
||||
.github/workflows @n8n-io/qa-dx
|
||||
.github/scripts @n8n-io/qa-dx
|
||||
.github/actions @n8n-io/qa-dx
|
||||
.github/poutine-rules @n8n-io/qa-dx
|
||||
.github/workflows @n8n-io/ci-admins
|
||||
.github/scripts @n8n-io/ci-admins
|
||||
.github/actions @n8n-io/ci-admins
|
||||
.github/poutine-rules @n8n-io/ci-admins
|
||||
|
||||
|
|
|
|||
232
.github/OWNERS
vendored
232
.github/OWNERS
vendored
|
|
@ -1,232 +0,0 @@
|
|||
# n8n CODEOWNERS
|
||||
#
|
||||
# Last-match-wins: specific rules MUST come AFTER general rules.
|
||||
|
||||
# Default catch-all (ensures every file gets at least one reviewer)
|
||||
* @n8n-io/catalysts
|
||||
|
||||
# Catalysts
|
||||
|
||||
packages/core/ @n8n-io/catalysts
|
||||
packages/workflow/ @n8n-io/catalysts
|
||||
packages/@n8n/config/ @n8n-io/catalysts
|
||||
packages/@n8n/backend-common/ @n8n-io/catalysts
|
||||
packages/@n8n/backend-test-utils/ @n8n-io/catalysts
|
||||
packages/@n8n/di/ @n8n-io/catalysts
|
||||
packages/@n8n/errors/ @n8n-io/catalysts
|
||||
packages/@n8n/constants/ @n8n-io/catalysts
|
||||
packages/@n8n/utils/ @n8n-io/catalysts
|
||||
packages/@n8n/api-types/ @n8n-io/catalysts
|
||||
packages/@n8n/workflow-sdk/ @n8n-io/instance-ai
|
||||
packages/@n8n/task-runner/ @n8n-io/catalysts
|
||||
packages/@n8n/task-runner-python/ @n8n-io/catalysts
|
||||
packages/@n8n/expression-runtime/ @n8n-io/catalysts
|
||||
packages/@n8n/db/ @n8n-io/catalysts
|
||||
packages/@n8n/json-schema-to-zod/ @n8n-io/catalysts
|
||||
packages/@n8n/crdt/ @n8n-io/catalysts
|
||||
packages/@n8n/extension-sdk/ @n8n-io/catalysts
|
||||
packages/@n8n/eslint-config/ @n8n-io/qa-dx
|
||||
packages/@n8n/typescript-config/ @n8n-io/qa-dx
|
||||
|
||||
packages/@n8n/db/src/migrations/ @n8n-io/migrations-review
|
||||
|
||||
# Top-level paths
|
||||
scripts/ @n8n-io/qa-dx
|
||||
patches/ @n8n-io/qa-dx
|
||||
assets/ @n8n-io/adore
|
||||
security/ @n8n-io/qa-dx
|
||||
|
||||
# @n8n/cli
|
||||
packages/@n8n/cli/ @n8n-io/adore
|
||||
packages/@n8n/cli/src/commands/credential/ @n8n-io/iam
|
||||
packages/@n8n/cli/src/commands/user/ @n8n-io/iam
|
||||
packages/@n8n/cli/src/commands/data-table/ @n8n-io/adore
|
||||
packages/@n8n/cli/src/commands/tag/ @n8n-io/adore
|
||||
packages/@n8n/cli/src/commands/project/ @n8n-io/ligo
|
||||
packages/@n8n/cli/src/commands/source-control/ @n8n-io/ligo
|
||||
packages/@n8n/cli/src/commands/variable/ @n8n-io/ligo
|
||||
packages/@n8n/cli/src/commands/skill/ @n8n-io/ai
|
||||
|
||||
# packages/cli
|
||||
packages/cli/ @n8n-io/catalysts
|
||||
packages/cli/src/scaling/ @n8n-io/catalysts
|
||||
packages/cli/src/concurrency/ @n8n-io/catalysts
|
||||
packages/cli/src/execution-lifecycle/ @n8n-io/catalysts
|
||||
packages/cli/src/executions/ @n8n-io/catalysts
|
||||
packages/cli/src/task-runners/ @n8n-io/catalysts
|
||||
packages/cli/src/webhooks/ @n8n-io/catalysts
|
||||
packages/cli/src/push/ @n8n-io/catalysts
|
||||
packages/cli/src/commands/ @n8n-io/catalysts
|
||||
packages/cli/src/config/ @n8n-io/catalysts
|
||||
packages/cli/src/eventbus/ @n8n-io/catalysts
|
||||
packages/cli/src/events/ @n8n-io/catalysts
|
||||
packages/cli/src/security-audit/ @n8n-io/catalysts
|
||||
packages/cli/src/modules/workflow-index/ @n8n-io/catalysts
|
||||
packages/cli/src/modules/breaking-changes/ @n8n-io/catalysts
|
||||
packages/cli/src/modules/otel/ @n8n-io/ligo
|
||||
|
||||
packages/cli/src/auth/ @n8n-io/iam
|
||||
packages/cli/src/credentials/ @n8n-io/iam
|
||||
packages/cli/src/mfa/ @n8n-io/iam
|
||||
packages/cli/src/oauth/ @n8n-io/iam
|
||||
packages/cli/src/permissions.ee/ @n8n-io/iam
|
||||
packages/cli/src/sso.ee/ @n8n-io/iam
|
||||
packages/cli/src/user-management/ @n8n-io/iam
|
||||
packages/cli/src/license/ @n8n-io/iam
|
||||
packages/cli/src/modules/ldap.ee/ @n8n-io/iam
|
||||
packages/cli/src/modules/log-streaming.ee/ @n8n-io/iam
|
||||
packages/cli/src/modules/sso-oidc/ @n8n-io/iam
|
||||
packages/cli/src/modules/sso-saml/ @n8n-io/iam
|
||||
packages/cli/src/modules/provisioning.ee/ @n8n-io/iam
|
||||
packages/cli/src/modules/dynamic-credentials.ee/ @n8n-io/iam
|
||||
packages/cli/src/modules/redaction/ @n8n-io/iam
|
||||
packages/cli/src/modules/instance-registry/ @n8n-io/iam
|
||||
packages/cli/src/modules/token-exchange/ @n8n-io/iam
|
||||
|
||||
packages/cli/src/environments.ee/ @n8n-io/ligo
|
||||
packages/cli/src/public-api/ @n8n-io/ligo
|
||||
packages/cli/src/modules/source-control.ee/ @n8n-io/ligo
|
||||
packages/cli/src/modules/external-secrets.ee/ @n8n-io/ligo
|
||||
packages/cli/src/modules/insights/ @n8n-io/ligo
|
||||
|
||||
packages/cli/src/collaboration/ @n8n-io/catalysts
|
||||
packages/cli/src/binary-data/ @n8n-io/catalysts
|
||||
packages/cli/src/posthog/ @n8n-io/adore
|
||||
packages/cli/src/modules/data-table/ @n8n-io/adore
|
||||
|
||||
packages/cli/src/evaluation.ee/ @n8n-io/ai
|
||||
packages/cli/src/chat/ @n8n-io/ai
|
||||
packages/cli/src/tool-generation/ @n8n-io/ai
|
||||
packages/cli/src/modules/workflow-builder/ @n8n-io/ai
|
||||
packages/cli/src/modules/mcp/ @n8n-io/ai
|
||||
packages/cli/src/modules/quick-connect/ @n8n-io/ai
|
||||
packages/cli/src/modules/chat-hub/ @n8n-io/ai
|
||||
packages/cli/src/modules/instance-ai/ @n8n-io/instance-ai
|
||||
|
||||
packages/cli/src/modules/community-packages/ @n8n-io/nodes
|
||||
|
||||
# CLI controllers
|
||||
packages/cli/src/controllers/auth.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/invitation.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/me.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/mfa.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/owner.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/password-reset.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/role.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/users.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/user-settings.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/api-keys.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/security-settings.controller.ts @n8n-io/iam
|
||||
packages/cli/src/controllers/oauth/ @n8n-io/iam
|
||||
packages/cli/src/controllers/ai.controller.ts @n8n-io/ai
|
||||
packages/cli/src/controllers/annotation-tags.controller.ee.ts @n8n-io/ai
|
||||
packages/cli/src/controllers/cta.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/folder.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/tags.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/binary-data.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/dynamic-templates.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/posthog.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/translation.controller.ts @n8n-io/adore
|
||||
packages/cli/src/controllers/project.controller.ts @n8n-io/ligo
|
||||
packages/cli/src/controllers/workflow-statistics.controller.ts @n8n-io/ligo
|
||||
packages/cli/src/controllers/node-types.controller.ts @n8n-io/nodes
|
||||
packages/cli/src/controllers/dynamic-node-parameters.controller.ts @n8n-io/nodes
|
||||
packages/cli/src/controllers/e2e.controller.ts @n8n-io/qa-dx
|
||||
|
||||
# CLI services
|
||||
packages/cli/src/services/jwt.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/user.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/role.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/role-cache.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/password.utility.ts @n8n-io/iam
|
||||
packages/cli/src/services/public-api-key.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/security-settings.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/ssrf/ @n8n-io/catalysts
|
||||
packages/cli/src/services/static-auth-service.ts @n8n-io/iam
|
||||
packages/cli/src/services/access.service.ts @n8n-io/iam
|
||||
packages/cli/src/services/ai.service.ts @n8n-io/ai
|
||||
packages/cli/src/services/ai-usage.service.ts @n8n-io/ai
|
||||
packages/cli/src/services/ai-workflow-builder.service.ts @n8n-io/ai
|
||||
packages/cli/src/services/annotation-tag.service.ee.ts @n8n-io/ai
|
||||
packages/cli/src/services/folder.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/tag.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/cta.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/dynamic-templates.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/frontend.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/banner.service.ts @n8n-io/adore
|
||||
packages/cli/src/services/project.service.ee.ts @n8n-io/ligo
|
||||
packages/cli/src/services/workflow-statistics.service.ts @n8n-io/ligo
|
||||
packages/cli/src/services/export.service.ts @n8n-io/ligo
|
||||
packages/cli/src/services/import.service.ts @n8n-io/ligo
|
||||
packages/cli/src/services/ownership.service.ts @n8n-io/ligo
|
||||
packages/cli/src/services/dynamic-node-parameters.service.ts @n8n-io/nodes
|
||||
|
||||
# Adore
|
||||
|
||||
packages/frontend/editor-ui/ @n8n-io/frontend
|
||||
packages/frontend/editor-ui/src/features/ai/ @n8n-io/ai
|
||||
packages/frontend/editor-ui/src/features/credentials/ @n8n-io/iam
|
||||
packages/frontend/editor-ui/src/features/execution/ @n8n-io/ligo
|
||||
packages/frontend/editor-ui/src/features/project-roles/ @n8n-io/iam
|
||||
packages/frontend/editor-ui/src/features/integrations/ @n8n-io/nodes
|
||||
|
||||
packages/frontend/@n8n/design-system/ @n8n-io/design
|
||||
packages/frontend/@n8n/stores/ @n8n-io/frontend
|
||||
packages/frontend/@n8n/composables/ @n8n-io/frontend
|
||||
packages/frontend/@n8n/rest-api-client/ @n8n-io/frontend
|
||||
packages/frontend/@n8n/storybook/ @n8n-io/design
|
||||
packages/frontend/@n8n/i18n/ @n8n-io/frontend
|
||||
packages/@n8n/stylelint-config/ @n8n-io/qa-dx
|
||||
|
||||
# AI
|
||||
|
||||
packages/@n8n/instance-ai/ @n8n-io/instance-ai
|
||||
packages/@n8n/nodes-langchain/ @n8n-io/ai
|
||||
packages/@n8n/ai-utilities/ @n8n-io/ai
|
||||
packages/@n8n/ai-node-sdk/ @n8n-io/ai
|
||||
packages/@n8n/ai-workflow-builder.ee/ @n8n-io/ai
|
||||
packages/@n8n/agents/ @n8n-io/ai
|
||||
packages/frontend/@n8n/chat/ @n8n-io/ai
|
||||
|
||||
# Chat
|
||||
|
||||
packages/@n8n/chat-hub/ @n8n-io/ai
|
||||
|
||||
# Nodes
|
||||
|
||||
packages/@n8n/codemirror-lang/ @n8n-io/nodes
|
||||
packages/@n8n/codemirror-lang-html/ @n8n-io/nodes
|
||||
packages/@n8n/codemirror-lang-sql/ @n8n-io/nodes
|
||||
packages/nodes-base/ @n8n-io/nodes
|
||||
packages/@n8n/decorators/ @n8n-io/catalysts
|
||||
packages/node-dev/ @n8n-io/nodes
|
||||
packages/@n8n/create-node/ @n8n-io/nodes
|
||||
packages/@n8n/node-cli/ @n8n-io/nodes
|
||||
packages/@n8n/imap/ @n8n-io/iam
|
||||
packages/@n8n/syslog-client/ @n8n-io/iam
|
||||
packages/@n8n/scan-community-package/ @n8n-io/nodes
|
||||
packages/@n8n/eslint-plugin-community-nodes/ @n8n-io/nodes
|
||||
packages/@n8n/computer-use/ @n8n-io/nodes
|
||||
packages/@n8n/local-gateway/ @n8n-io/nodes
|
||||
packages/@n8n/mcp-browser/ @n8n-io/nodes
|
||||
packages/@n8n/mcp-browser-extension/ @n8n-io/nodes
|
||||
|
||||
# IAM
|
||||
|
||||
packages/@n8n/permissions/ @n8n-io/iam
|
||||
packages/@n8n/client-oauth2/ @n8n-io/iam
|
||||
|
||||
# LiGo
|
||||
|
||||
packages/extensions/insights/ @n8n-io/ligo
|
||||
|
||||
# CI/CD
|
||||
|
||||
.github/ @n8n-io/qa-dx
|
||||
docker/ @n8n-io/qa-dx
|
||||
|
||||
# QA
|
||||
|
||||
packages/testing/ @n8n-io/qa-dx
|
||||
packages/@n8n/benchmark/ @n8n-io/qa-dx
|
||||
packages/@n8n/vitest-config/ @n8n-io/qa-dx
|
||||
20
.github/WORKFLOWS.md
vendored
20
.github/WORKFLOWS.md
vendored
|
|
@ -72,7 +72,6 @@ Complete reference for n8n's `.github/` folder.
|
|||
│ │ (cron) │ │ ├─ docker-build-push (nightly) │───▶│ Images │ │
|
||||
│ └──────────┘ │ ├─ test-benchmark-nightly │───▶│ Metrics │ │
|
||||
│ │ ├─ test-workflows-nightly │ └────────────┘ │
|
||||
│ │ ├─ test-e2e-vm-expressions │ │
|
||||
│ │ └─ test-e2e-coverage-weekly │ │
|
||||
│ └──────────────────────────────────┘ │
|
||||
│ │
|
||||
|
|
@ -242,7 +241,8 @@ CALLER REUSABLE WORKFLOW
|
|||
ci-pull-requests.yml
|
||||
├──────────────────────────▶ test-unit-reusable.yml
|
||||
├──────────────────────────▶ test-linting-reusable.yml
|
||||
├──────────────────────────▶ test-e2e-reusable.yml
|
||||
├──────────────────────────▶ test-e2e-ci-reusable.yml
|
||||
│ └──────────▶ test-e2e-reusable.yml
|
||||
└──────────────────────────▶ sec-ci-reusable.yml
|
||||
└──────────▶ sec-poutine-reusable.yml
|
||||
|
||||
|
|
@ -258,9 +258,6 @@ release-publish.yml
|
|||
test-workflows-nightly.yml
|
||||
└──────────────────────────▶ test-workflows-callable.yml
|
||||
|
||||
test-e2e-vm-expressions-nightly.yml
|
||||
└──────────────────────────▶ test-e2e-reusable.yml
|
||||
|
||||
PR Comment Dispatchers (triggered by /command in PR comments):
|
||||
test-workflows-pr-comment.yml
|
||||
└──────────────────────────▶ test-workflows-callable.yml
|
||||
|
|
@ -353,8 +350,8 @@ Runs on push to `master` or `1.x`:
|
|||
```
|
||||
Push to master/1.x
|
||||
├─ build-github (populate cache)
|
||||
├─ unit-test (matrix: Node 22.x, 24.14.1, 25.x)
|
||||
│ └─ Coverage only on 24.14.1
|
||||
├─ unit-test (matrix: Node 22.x, 24.13.1, 25.x)
|
||||
│ └─ Coverage only on 24.13.1
|
||||
├─ lint
|
||||
└─ notify-on-failure (Slack #alerts-build)
|
||||
```
|
||||
|
|
@ -373,7 +370,6 @@ Push to master/1.x
|
|||
| Daily 00:00 | `util-check-docs-urls.yml` | Doc link validation |
|
||||
| Daily 01:30, 02:30, 03:30 | `test-benchmark-nightly.yml` | Performance benchmarks |
|
||||
| Daily 02:00 | `test-workflows-nightly.yml` | Workflow tests |
|
||||
| Daily 04:00 | `test-e2e-vm-expressions-nightly.yml`| VM expression E2E |
|
||||
| Daily 05:00 | `test-benchmark-destroy-nightly.yml`| Cleanup benchmark env |
|
||||
| Monday 00:00 | `util-update-node-popularity.yml` | Node usage stats |
|
||||
| Monday 02:00 | `test-e2e-coverage-weekly.yml` | Weekly E2E coverage |
|
||||
|
|
@ -394,7 +390,7 @@ Composite actions in `.github/actions/`:
|
|||
|
||||
```yaml
|
||||
inputs:
|
||||
node-version: # default: '24.14.1'
|
||||
node-version: # default: '24.13.1'
|
||||
enable-docker-cache: # default: 'false' (Blacksmith Buildx)
|
||||
build-command: # default: 'pnpm build'
|
||||
```
|
||||
|
|
@ -419,6 +415,8 @@ Workflows with `workflow_call` trigger:
|
|||
| `test-unit-reusable.yml` | `ref`, `nodeVersion`, `collectCoverage` | Unit tests |
|
||||
| `test-linting-reusable.yml` | `ref`, `nodeVersion` | ESLint |
|
||||
| `test-e2e-reusable.yml` | `branch`, `test-mode`, `shards`, `runner` | Core E2E executor |
|
||||
| `test-e2e-ci-reusable.yml` | `branch` | E2E orchestrator |
|
||||
| `test-e2e-docker-pull-reusable.yml`| `branch`, `n8n_version` | E2E with pulled image |
|
||||
| `test-workflows-callable.yml` | `git_ref`, `compare_schemas` | Workflow tests |
|
||||
| `docker-build-push.yml` | `n8n_version`, `release_type`, `push_enabled` | Docker build |
|
||||
| `sec-ci-reusable.yml` | `ref` | Security orchestrator |
|
||||
|
|
@ -487,7 +485,7 @@ Team ownership mappings in `CODEOWNERS`:
|
|||
| `ubuntu-latest` | 2 | Simple jobs, fork PR E2E |
|
||||
| `blacksmith-2vcpu-ubuntu-2204` | 2 | Standard builds, E2E shards |
|
||||
| `blacksmith-4vcpu-ubuntu-2204` | 4 | Unit tests, typecheck, lint |
|
||||
| `blacksmith-8vcpu-ubuntu-2204` | 8 | Heavy parallel workloads |
|
||||
| `blacksmith-8vcpu-ubuntu-2204` | 8 | E2E coverage (weekly) |
|
||||
| `blacksmith-4vcpu-ubuntu-2204-arm` | 4 | ARM64 Docker builds |
|
||||
|
||||
### Selection Guidelines
|
||||
|
|
@ -500,7 +498,7 @@ Team ownership mappings in `CODEOWNERS`:
|
|||
|
||||
**`blacksmith-4vcpu-ubuntu-2204`** - Unit tests (parallelized), linting (parallel file processing), typechecking (CPU-intensive), E2E test shards
|
||||
|
||||
**`blacksmith-8vcpu-ubuntu-2204`** - Heavy parallel workloads
|
||||
**`blacksmith-8vcpu-ubuntu-2204`** - Heavy parallel workloads, full E2E coverage runs
|
||||
|
||||
### Runner Provider Toggle
|
||||
|
||||
|
|
|
|||
51
.github/actions/build-n8n-docker/action.yml
vendored
51
.github/actions/build-n8n-docker/action.yml
vendored
|
|
@ -1,51 +0,0 @@
|
|||
# Builds the n8n and runners CI test images (n8nio/n8n:local +
|
||||
# n8nio/runners:local) and caches the tarball under a SHA-derived key so
|
||||
# downstream jobs in the same workflow run can restore it via
|
||||
# load-n8n-docker.
|
||||
#
|
||||
# Cache-aware: if the tarball for this SHA is already cached (e.g. a parent
|
||||
# workflow ran prepare-docker earlier), the build step is skipped and this
|
||||
# action becomes a no-op.
|
||||
|
||||
name: 'Build n8n CI Docker image'
|
||||
description: 'Builds n8nio/n8n + n8nio/runners test images and publishes them as a SHA-keyed cache tarball for downstream shards.'
|
||||
|
||||
inputs:
|
||||
build-variant:
|
||||
description: 'standard or coverage. Coverage uses build:docker:coverage.'
|
||||
required: false
|
||||
default: 'standard'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Check cache for existing image
|
||||
id: cache-check
|
||||
uses: actions/cache/restore@640a1c2554105b57832a23eea0b4672fc7a790d5 # v4.2.3
|
||||
with:
|
||||
key: n8n-docker-image-${{ github.sha }}
|
||||
path: /tmp/n8n-image.tar.zst
|
||||
lookup-only: true
|
||||
|
||||
- name: Build Docker image
|
||||
if: steps.cache-check.outputs.cache-hit != 'true'
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ${{ inputs.build-variant == 'coverage' && 'pnpm build:docker:coverage' || 'pnpm build:docker' }}
|
||||
enable-docker-cache: true
|
||||
env:
|
||||
INCLUDE_TEST_CONTROLLER: 'true'
|
||||
|
||||
- name: Save image tarball
|
||||
if: steps.cache-check.outputs.cache-hit != 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
docker save n8nio/n8n:local n8nio/runners:local | zstd -T0 -3 -o /tmp/n8n-image.tar.zst
|
||||
ls -lh /tmp/n8n-image.tar.zst
|
||||
|
||||
- name: Publish image tarball to cache
|
||||
if: steps.cache-check.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@640a1c2554105b57832a23eea0b4672fc7a790d5 # v4.2.3
|
||||
with:
|
||||
key: n8n-docker-image-${{ github.sha }}
|
||||
path: /tmp/n8n-image.tar.zst
|
||||
|
|
@ -1,10 +1,6 @@
|
|||
import { describe, it, before, after } from 'node:test';
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { execFileSync } from 'node:child_process';
|
||||
import { mkdtempSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { matchGlob, parseFilters, evaluateFilter, runValidate, getChangedFiles, getMergeBase } from '../ci-filter.mjs';
|
||||
import { matchGlob, parseFilters, evaluateFilter, runValidate } from '../ci-filter.mjs';
|
||||
|
||||
// --- matchGlob ---
|
||||
|
||||
|
|
@ -176,70 +172,6 @@ describe('evaluateFilter', () => {
|
|||
});
|
||||
});
|
||||
|
||||
// --- getChangedFiles + getMergeBase (integration, exercises real git) ---
|
||||
|
||||
describe('getChangedFiles', () => {
|
||||
const repoDir = mkdtempSync(join(tmpdir(), 'ci-filter-'));
|
||||
const remoteDir = mkdtempSync(join(tmpdir(), 'ci-filter-remote-'));
|
||||
const originalCwd = process.cwd();
|
||||
const git = (args: string[], cwd: string = repoDir) =>
|
||||
execFileSync('git', args, { cwd, stdio: 'pipe' }).toString().trim();
|
||||
|
||||
before(() => {
|
||||
// Bare remote so the action's `git fetch origin <ref>` works
|
||||
execFileSync('git', ['init', '--bare', '-b', 'main', remoteDir], { stdio: 'pipe' });
|
||||
git(['init', '-b', 'main'], repoDir);
|
||||
git(['config', 'user.email', 'test@test.local']);
|
||||
git(['config', 'user.name', 'test']);
|
||||
git(['remote', 'add', 'origin', remoteDir]);
|
||||
|
||||
// Common ancestor commit
|
||||
writeFileSync(join(repoDir, 'shared.ts'), 'shared\n');
|
||||
git(['add', '.']);
|
||||
git(['commit', '-m', 'root']);
|
||||
git(['push', 'origin', 'main']);
|
||||
|
||||
// PR branches off main, adds a file
|
||||
git(['checkout', '-b', 'pr-branch']);
|
||||
writeFileSync(join(repoDir, 'pr-only.ts'), 'pr\n');
|
||||
git(['add', '.']);
|
||||
git(['commit', '-m', 'PR change']);
|
||||
|
||||
// Master drifts forward, modifying shared.ts (the pre-fix bug surface)
|
||||
git(['checkout', 'main']);
|
||||
writeFileSync(join(repoDir, 'shared.ts'), 'shared\ndrift-from-master\n');
|
||||
git(['commit', '-am', 'master moves']);
|
||||
git(['push', 'origin', 'main']);
|
||||
|
||||
// Sit on the PR branch as if running CI
|
||||
git(['checkout', 'pr-branch']);
|
||||
process.chdir(repoDir);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
process.chdir(originalCwd);
|
||||
rmSync(repoDir, { recursive: true, force: true });
|
||||
rmSync(remoteDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('returns only PR-introduced files (master drift does not pollute)', () => {
|
||||
const changed = getChangedFiles('main');
|
||||
assert.deepEqual(changed, ['pr-only.ts']);
|
||||
});
|
||||
|
||||
it('getMergeBase returns the common ancestor commit', () => {
|
||||
const mergeBase = getMergeBase();
|
||||
assert.match(mergeBase, /^[a-f0-9]{40}$/);
|
||||
const expected = git(['merge-base', 'FETCH_HEAD', 'HEAD']);
|
||||
assert.equal(mergeBase, expected);
|
||||
});
|
||||
|
||||
it('rejects unsafe base refs', () => {
|
||||
assert.throws(() => getChangedFiles('main; rm -rf /'), /Unsafe/);
|
||||
assert.throws(() => getChangedFiles('main$evil'), /Unsafe/);
|
||||
});
|
||||
});
|
||||
|
||||
// --- runValidate ---
|
||||
|
||||
describe('runValidate', () => {
|
||||
|
|
|
|||
9
.github/actions/ci-filter/action.yml
vendored
9
.github/actions/ci-filter/action.yml
vendored
|
|
@ -24,15 +24,6 @@ outputs:
|
|||
results:
|
||||
description: 'JSON object: { "filter-name": true/false }'
|
||||
value: ${{ steps.run.outputs.results }}
|
||||
changed-files:
|
||||
description: 'Newline-separated list of changed files (filter mode only)'
|
||||
value: ${{ steps.run.outputs.changed-files }}
|
||||
base-ref:
|
||||
description: 'Resolved base ref used for the diff (filter mode only)'
|
||||
value: ${{ steps.run.outputs.base-ref }}
|
||||
merge-base:
|
||||
description: 'Merge-base SHA between FETCH_HEAD and HEAD (filter mode only)'
|
||||
value: ${{ steps.run.outputs.merge-base }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
|
|
|
|||
25
.github/actions/ci-filter/ci-filter.mjs
vendored
25
.github/actions/ci-filter/ci-filter.mjs
vendored
|
|
@ -98,30 +98,14 @@ export function getChangedFiles(baseRef) {
|
|||
if (!SAFE_REF.test(baseRef)) {
|
||||
throw new Error(`Unsafe base ref: "${baseRef}"`);
|
||||
}
|
||||
// Deepen the fetch so the merge base is reachable from this shallow clone.
|
||||
// A 2-dot diff (FETCH_HEAD HEAD) reports anything that differs in either
|
||||
// direction, so files added to base-branch after the PR diverged show up as
|
||||
// "changed" — spuriously triggering path-filtered jobs. The merge base
|
||||
// scopes the diff to PR-only changes.
|
||||
execSync(`git fetch --no-tags --prune --deepen=200 origin ${baseRef}`, { stdio: 'pipe' });
|
||||
const output = execSync('git diff --name-only --merge-base FETCH_HEAD HEAD', {
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
execSync(`git fetch --depth=1 origin ${baseRef}`, { stdio: 'pipe' });
|
||||
const output = execSync('git diff --name-only FETCH_HEAD HEAD', { encoding: 'utf-8' });
|
||||
return output
|
||||
.split('\n')
|
||||
.map((f) => f.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the merge-base SHA between FETCH_HEAD and HEAD.
|
||||
* Used to give downstream tools (e.g. janitor's AST diff) a stable, PR-only
|
||||
* comparison point that doesn't drift when the base branch moves forward.
|
||||
*/
|
||||
export function getMergeBase() {
|
||||
return execSync('git merge-base FETCH_HEAD HEAD', { encoding: 'utf-8' }).trim();
|
||||
}
|
||||
|
||||
// --- Filter evaluation ---
|
||||
|
||||
/**
|
||||
|
|
@ -171,9 +155,7 @@ export function runFilter() {
|
|||
|
||||
const filters = parseFilters(filtersInput);
|
||||
const changedFiles = getChangedFiles(baseRef);
|
||||
const mergeBase = getMergeBase();
|
||||
|
||||
console.log(`Merge base: ${mergeBase}`);
|
||||
console.log(`Changed files (${changedFiles.length}):`);
|
||||
for (const f of changedFiles) {
|
||||
console.log(` ${f}`);
|
||||
|
|
@ -188,9 +170,6 @@ export function runFilter() {
|
|||
}
|
||||
|
||||
setOutput('results', JSON.stringify(results));
|
||||
setOutput('changed-files', changedFiles.join('\n'));
|
||||
setOutput('base-ref', baseRef);
|
||||
setOutput('merge-base', mergeBase);
|
||||
}
|
||||
|
||||
// --- Mode: validate ---
|
||||
|
|
|
|||
11
.github/actions/docker-registry-login/action.yml
vendored
11
.github/actions/docker-registry-login/action.yml
vendored
|
|
@ -39,13 +39,10 @@ runs:
|
|||
|
||||
- name: Login to DockerHub
|
||||
if: inputs.login-dockerhub == 'true'
|
||||
shell: bash
|
||||
env:
|
||||
DOCKER_USER: ${{ inputs.dockerhub-username }}
|
||||
DOCKER_PASS: ${{ inputs.dockerhub-password }}
|
||||
run: |
|
||||
node .github/scripts/retry.mjs --attempts 3 --delay 10 \
|
||||
'echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin'
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ inputs.dockerhub-username }}
|
||||
password: ${{ inputs.dockerhub-password }}
|
||||
|
||||
- name: Login to DHI Registry
|
||||
if: inputs.login-dhi == 'true'
|
||||
|
|
|
|||
23
.github/actions/load-n8n-docker/action.yml
vendored
23
.github/actions/load-n8n-docker/action.yml
vendored
|
|
@ -1,23 +0,0 @@
|
|||
# Restores the n8n + runners image tarball (produced by build-n8n-docker
|
||||
# under the SHA-derived cache key) and loads both images into the local
|
||||
# docker daemon.
|
||||
#
|
||||
# After this action runs, `n8nio/n8n:local` and `n8nio/runners:local` are
|
||||
# present on the runner.
|
||||
|
||||
name: 'Load n8n Docker images from cache'
|
||||
description: 'Restores the zstd-compressed n8n + runners image tarball from the SHA-keyed GHA cache and loads both images into the local docker daemon.'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Restore image tarball from cache
|
||||
uses: actions/cache/restore@640a1c2554105b57832a23eea0b4672fc7a790d5 # v4.2.3
|
||||
with:
|
||||
key: n8n-docker-image-${{ github.sha }}
|
||||
path: /tmp/n8n-image.tar.zst
|
||||
fail-on-cache-miss: true
|
||||
|
||||
- name: Load n8n and runners images into docker
|
||||
shell: bash
|
||||
run: zstd -d -c /tmp/n8n-image.tar.zst | docker load
|
||||
23
.github/actions/setup-nodejs/action.yml
vendored
23
.github/actions/setup-nodejs/action.yml
vendored
|
|
@ -45,19 +45,13 @@ runs:
|
|||
mkdir -p "$PNPM_STORE_PATH"
|
||||
fi
|
||||
|
||||
- name: Configure SafeChain
|
||||
shell: bash
|
||||
run: |
|
||||
# SafeChain only reads configs from this directory https://github.com/AikidoSec/safe-chain#configuration-options-1
|
||||
mkdir -p "$HOME/.safe-chain"
|
||||
cp "${{ github.action_path }}/safe-chain.config.json" "$HOME/.safe-chain/config.json"
|
||||
|
||||
- name: Install Aikido SafeChain
|
||||
if: runner.os != 'Windows'
|
||||
run: |
|
||||
VERSION="1.5.1"
|
||||
EXPECTED_SHA256="7c910fff717649c86cc8ca960e6c054d3734da2d660050e3bcfc54029e3b485b"
|
||||
node .github/scripts/retry.mjs --attempts 3 --delay 10 -- \
|
||||
curl -fsSL -o install-safe-chain.sh "https://github.com/AikidoSec/safe-chain/releases/download/${VERSION}/install-safe-chain.sh"
|
||||
VERSION="1.4.1"
|
||||
EXPECTED_SHA256="628235987175072a4255aa3f5f0128f31795b63970f1970ae8a04d07bf8527b0"
|
||||
node .github/scripts/retry.mjs --attempts 3 --delay 10 \
|
||||
"curl -fsSL -o install-safe-chain.sh https://github.com/AikidoSec/safe-chain/releases/download/${VERSION}/install-safe-chain.sh"
|
||||
echo "${EXPECTED_SHA256} install-safe-chain.sh" | sha256sum -c -
|
||||
sh install-safe-chain.sh --ci
|
||||
rm install-safe-chain.sh
|
||||
|
|
@ -66,11 +60,16 @@ runs:
|
|||
- name: Install Dependencies
|
||||
if: ${{ inputs.install-command != '' }}
|
||||
env:
|
||||
INSTALL_COMMAND: ${{ inputs.install-command }}
|
||||
INSTALL_COMMAND: ${{ inputs.install-command }}
|
||||
run: |
|
||||
$INSTALL_COMMAND
|
||||
shell: bash
|
||||
|
||||
- name: Disable safe-chain
|
||||
if: runner.os != 'Windows'
|
||||
run: safe-chain teardown
|
||||
shell: bash
|
||||
|
||||
- name: Configure Turborepo Cache
|
||||
uses: rharkor/caching-for-turbo@0abc2381e688c4d2832f0665a68a01c6e82f0d6c # v2.3.11
|
||||
|
||||
|
|
|
|||
|
|
@ -1,16 +0,0 @@
|
|||
{
|
||||
"npm": {
|
||||
"minimumPackageAgeExclusions": [
|
||||
"@n8n/*",
|
||||
"@n8n_io/*",
|
||||
"n8n",
|
||||
"n8n-containers",
|
||||
"n8n-core",
|
||||
"n8n-editor-ui",
|
||||
"n8n-node-dev",
|
||||
"n8n-nodes-base",
|
||||
"n8n-playwright",
|
||||
"n8n-workflow"
|
||||
]
|
||||
}
|
||||
}
|
||||
31
.github/docker-compose.yml
vendored
31
.github/docker-compose.yml
vendored
|
|
@ -1,4 +1,35 @@
|
|||
services:
|
||||
mariadb:
|
||||
image: mariadb:10.5
|
||||
environment:
|
||||
- MARIADB_DATABASE=n8n
|
||||
- MARIADB_ROOT_PASSWORD=password
|
||||
- MARIADB_MYSQL_LOCALHOST_USER=true
|
||||
ports:
|
||||
- 3306:3306
|
||||
tmpfs:
|
||||
- /var/lib/mysql
|
||||
healthcheck:
|
||||
test: ['CMD', 'mysqladmin', 'ping', '-h', 'localhost', '-u', 'root', '-ppassword']
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
retries: 10
|
||||
|
||||
mysql-8.4:
|
||||
image: mysql:8.4
|
||||
environment:
|
||||
- MYSQL_DATABASE=n8n
|
||||
- MYSQL_ROOT_PASSWORD=password
|
||||
ports:
|
||||
- 3306:3306
|
||||
tmpfs:
|
||||
- /var/lib/mysql
|
||||
healthcheck:
|
||||
test: ['CMD', 'mysqladmin', 'ping', '-h', 'localhost', '-u', 'root', '-ppassword']
|
||||
interval: 5s
|
||||
timeout: 10s
|
||||
retries: 10
|
||||
|
||||
postgres:
|
||||
image: postgres:16
|
||||
restart: always
|
||||
|
|
|
|||
3
.github/pull_request_template.md
vendored
3
.github/pull_request_template.md
vendored
|
|
@ -10,14 +10,13 @@ Photos and videos are recommended.
|
|||
<!--
|
||||
Include links to **Linear ticket** or Github issue or Community forum post.
|
||||
Important in order to close *automatically* and provide context to reviewers.
|
||||
https://linear.app/n8n/issue/[TICKET-ID]
|
||||
https://linear.app/n8n/issue/
|
||||
-->
|
||||
<!-- Use "closes #<issue-number>", "fixes #<issue-number>", or "resolves #<issue-number>" to automatically close issues when the PR is merged. -->
|
||||
|
||||
|
||||
## Review / Merge checklist
|
||||
|
||||
- [ ] I have seen this code, I have run this code, and I take responsibility for this code.
|
||||
- [ ] PR title and summary are descriptive. ([conventions](../blob/master/.github/pull_request_title_conventions.md)) <!--
|
||||
**Remember, the title automatically goes into the changelog.
|
||||
Use `(no-changelog)` otherwise.**
|
||||
|
|
|
|||
367
.github/scripts/bump-versions.mjs
vendored
367
.github/scripts/bump-versions.mjs
vendored
|
|
@ -11,7 +11,7 @@ const exec = promisify(child_process.exec);
|
|||
/**
|
||||
* @param {string | semver.SemVer} currentVersion
|
||||
*/
|
||||
export function generateExperimentalVersion(currentVersion) {
|
||||
function generateExperimentalVersion(currentVersion) {
|
||||
const parsed = semver.parse(currentVersion);
|
||||
if (!parsed) throw new Error(`Invalid version: ${currentVersion}`);
|
||||
|
||||
|
|
@ -28,31 +28,84 @@ export function generateExperimentalVersion(currentVersion) {
|
|||
return `${parsed.major}.${parsed.minor}.${parsed.patch}-exp.0`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{ pnpm?: { overrides?: Record<string, string> }, overrides?: Record<string, string> }} pkg
|
||||
* @returns {Record<string, string>}
|
||||
*/
|
||||
export function getOverrides(pkg) {
|
||||
return { ...pkg.pnpm?.overrides, ...pkg.overrides };
|
||||
const rootDir = process.cwd();
|
||||
|
||||
const releaseType = /** @type { import('semver').ReleaseType | "experimental" } */ (
|
||||
process.env.RELEASE_TYPE
|
||||
);
|
||||
assert.match(releaseType, /^(patch|minor|major|experimental|premajor)$/, 'Invalid RELEASE_TYPE');
|
||||
|
||||
// TODO: if releaseType is `auto` determine release type based on the changelog
|
||||
|
||||
const lastTag = (await exec('git describe --tags --match "n8n@*" --abbrev=0')).stdout.trim();
|
||||
const packages = JSON.parse(
|
||||
(
|
||||
await exec(
|
||||
`pnpm ls -r --only-projects --json | jq -r '[.[] | { name: .name, version: .version, path: .path, private: .private}]'`,
|
||||
)
|
||||
).stdout,
|
||||
);
|
||||
|
||||
const packageMap = {};
|
||||
for (let { name, path, version, private: isPrivate } of packages) {
|
||||
if (isPrivate && path !== rootDir) {
|
||||
continue;
|
||||
}
|
||||
if (path === rootDir) {
|
||||
name = 'monorepo-root';
|
||||
}
|
||||
|
||||
const isDirty = await exec(`git diff --quiet HEAD ${lastTag} -- ${path}`)
|
||||
.then(() => false)
|
||||
.catch((error) => true);
|
||||
|
||||
packageMap[name] = { path, isDirty, version };
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} content
|
||||
* @returns {Record<string, unknown>}
|
||||
*/
|
||||
export function parseWorkspaceYaml(content) {
|
||||
assert.ok(
|
||||
Object.values(packageMap).some(({ isDirty }) => isDirty),
|
||||
'No changes found since the last release',
|
||||
);
|
||||
|
||||
// Propagate isDirty transitively: if a package's dependency will be bumped,
|
||||
// that package also needs a bump (e.g. design-system → editor-ui → cli).
|
||||
|
||||
// Detect root-level changes that affect resolved dep versions without touching individual
|
||||
// package.json files: pnpm.overrides (applies to all specifiers)
|
||||
// and pnpm-workspace.yaml catalog entries (applies only to deps using a "catalog:…" specifier).
|
||||
|
||||
const rootPkgJson = JSON.parse(await readFile(resolve(rootDir, 'package.json'), 'utf-8'));
|
||||
const rootPkgJsonAtTag = await exec(`git show ${lastTag}:package.json`)
|
||||
.then(({ stdout }) => JSON.parse(stdout))
|
||||
.catch(() => ({}));
|
||||
|
||||
const getOverrides = (pkg) => ({ ...pkg.pnpm?.overrides, ...pkg.overrides });
|
||||
|
||||
const currentOverrides = getOverrides(rootPkgJson);
|
||||
const previousOverrides = getOverrides(rootPkgJsonAtTag);
|
||||
|
||||
const changedOverrides = new Set(
|
||||
Object.keys({ ...currentOverrides, ...previousOverrides }).filter(
|
||||
(k) => currentOverrides[k] !== previousOverrides[k],
|
||||
),
|
||||
);
|
||||
|
||||
const parseWorkspaceYaml = (content) => {
|
||||
try {
|
||||
return /** @type {Record<string, unknown>} */ (parse(content) ?? {});
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Record<string, unknown>} ws
|
||||
* @returns {Map<string, Record<string, string>>}
|
||||
*/
|
||||
export function getCatalogs(ws) {
|
||||
};
|
||||
const workspaceYaml = parseWorkspaceYaml(
|
||||
await readFile(resolve(rootDir, 'pnpm-workspace.yaml'), 'utf-8').catch(() => ''),
|
||||
);
|
||||
const workspaceYamlAtTag = parseWorkspaceYaml(
|
||||
await exec(`git show ${lastTag}:pnpm-workspace.yaml`)
|
||||
.then(({ stdout }) => stdout)
|
||||
.catch(() => ''),
|
||||
);
|
||||
const getCatalogs = (ws) => {
|
||||
const result = new Map();
|
||||
if (ws.catalog) {
|
||||
result.set('default', /** @type {Record<string,string>} */ (ws.catalog));
|
||||
|
|
@ -63,232 +116,98 @@ export function getCatalogs(ws) {
|
|||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
// changedCatalogEntries: Map<catalogName, Set<depName>>
|
||||
const currentCatalogs = getCatalogs(workspaceYaml);
|
||||
const previousCatalogs = getCatalogs(workspaceYamlAtTag);
|
||||
const changedCatalogEntries = new Map();
|
||||
for (const catalogName of new Set([...currentCatalogs.keys(), ...previousCatalogs.keys()])) {
|
||||
const current = currentCatalogs.get(catalogName) ?? {};
|
||||
const previous = previousCatalogs.get(catalogName) ?? {};
|
||||
const changedDeps = new Set(
|
||||
Object.keys({ ...current, ...previous }).filter((dep) => current[dep] !== previous[dep]),
|
||||
);
|
||||
if (changedDeps.size > 0) {
|
||||
changedCatalogEntries.set(catalogName, changedDeps);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Record<string, string>} currentOverrides
|
||||
* @param {Record<string, string>} previousOverrides
|
||||
* @returns {Set<string>}
|
||||
*/
|
||||
export function computeChangedOverrides(currentOverrides, previousOverrides) {
|
||||
return new Set(
|
||||
Object.keys({ ...currentOverrides, ...previousOverrides }).filter(
|
||||
(k) => currentOverrides[k] !== previousOverrides[k],
|
||||
),
|
||||
// Store full dep objects (with specifiers) so we can inspect "catalog:…" values below.
|
||||
const depsByPackage = {};
|
||||
for (const packageName in packageMap) {
|
||||
const packageFile = resolve(packageMap[packageName].path, 'package.json');
|
||||
const packageJson = JSON.parse(await readFile(packageFile, 'utf-8'));
|
||||
depsByPackage[packageName] = /** @type {Record<string,string>} */ (
|
||||
packageJson.dependencies ?? {}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Map<string, Record<string, string>>} currentCatalogs
|
||||
* @param {Map<string, Record<string, string>>} previousCatalogs
|
||||
* @returns {Map<string, Set<string>>}
|
||||
*/
|
||||
export function computeChangedCatalogEntries(currentCatalogs, previousCatalogs) {
|
||||
const changedCatalogEntries = new Map();
|
||||
for (const catalogName of new Set([...currentCatalogs.keys(), ...previousCatalogs.keys()])) {
|
||||
const current = currentCatalogs.get(catalogName) ?? {};
|
||||
const previous = previousCatalogs.get(catalogName) ?? {};
|
||||
const changedDeps = new Set(
|
||||
Object.keys({ ...current, ...previous }).filter((dep) => current[dep] !== previous[dep]),
|
||||
);
|
||||
if (changedDeps.size > 0) {
|
||||
changedCatalogEntries.set(catalogName, changedDeps);
|
||||
// Mark packages dirty if any dep had a root-level override or catalog version change.
|
||||
for (const [packageName, deps] of Object.entries(depsByPackage)) {
|
||||
if (packageMap[packageName].isDirty) continue;
|
||||
for (const [dep, specifier] of Object.entries(deps)) {
|
||||
if (changedOverrides.has(dep)) {
|
||||
packageMap[packageName].isDirty = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return changedCatalogEntries;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark packages as dirty if any dep had a root-level override or catalog version change.
|
||||
* Mutates packageMap in place.
|
||||
*
|
||||
* @param {Record<string, { isDirty: boolean }>} packageMap
|
||||
* @param {Record<string, Record<string, string>>} depsByPackage
|
||||
* @param {Set<string>} changedOverrides
|
||||
* @param {Map<string, Set<string>>} changedCatalogEntries
|
||||
*/
|
||||
export function markDirtyByRootChanges(
|
||||
packageMap,
|
||||
depsByPackage,
|
||||
changedOverrides,
|
||||
changedCatalogEntries,
|
||||
) {
|
||||
for (const [packageName, deps] of Object.entries(depsByPackage)) {
|
||||
if (packageMap[packageName].isDirty) continue;
|
||||
for (const [dep, specifier] of Object.entries(deps)) {
|
||||
if (changedOverrides.has(dep)) {
|
||||
if (typeof specifier === 'string' && specifier.startsWith('catalog:')) {
|
||||
const catalogName = specifier === 'catalog:' ? 'default' : specifier.slice(8);
|
||||
if (changedCatalogEntries.get(catalogName)?.has(dep)) {
|
||||
packageMap[packageName].isDirty = true;
|
||||
break;
|
||||
}
|
||||
if (typeof specifier === 'string' && specifier.startsWith('catalog:')) {
|
||||
const catalogName = specifier === 'catalog:' ? 'default' : specifier.slice(8);
|
||||
if (changedCatalogEntries.get(catalogName)?.has(dep)) {
|
||||
packageMap[packageName].isDirty = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Propagate isDirty transitively: if a package's dependency will be bumped,
|
||||
* that package also needs a bump. Mutates packageMap in place.
|
||||
*
|
||||
* @param {Record<string, { isDirty: boolean }>} packageMap
|
||||
* @param {Record<string, Record<string, string>>} depsByPackage
|
||||
*/
|
||||
export function propagateDirtyTransitively(packageMap, depsByPackage) {
|
||||
let changed = true;
|
||||
while (changed) {
|
||||
changed = false;
|
||||
for (const packageName in packageMap) {
|
||||
if (packageMap[packageName].isDirty) continue;
|
||||
if (Object.keys(depsByPackage[packageName]).some((dep) => packageMap[dep]?.isDirty)) {
|
||||
packageMap[packageName].isDirty = true;
|
||||
changed = true;
|
||||
}
|
||||
let changed = true;
|
||||
while (changed) {
|
||||
changed = false;
|
||||
for (const packageName in packageMap) {
|
||||
if (packageMap[packageName].isDirty) continue;
|
||||
if (Object.keys(depsByPackage[packageName]).some((dep) => packageMap[dep]?.isDirty)) {
|
||||
packageMap[packageName].isDirty = true;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} version
|
||||
* @param {import('semver').ReleaseType | 'experimental'} releaseType
|
||||
* @returns {string}
|
||||
*/
|
||||
export function computeNewVersion(version, releaseType) {
|
||||
switch (releaseType) {
|
||||
case 'experimental':
|
||||
return generateExperimentalVersion(version);
|
||||
case 'premajor':
|
||||
return /** @type {string} */ (
|
||||
semver.inc(
|
||||
// Keep the monorepo version up to date with the released version
|
||||
packageMap['monorepo-root'].version = packageMap['n8n'].version;
|
||||
|
||||
for (const packageName in packageMap) {
|
||||
const { path, version, isDirty } = packageMap[packageName];
|
||||
const packageFile = resolve(path, 'package.json');
|
||||
const packageJson = JSON.parse(await readFile(packageFile, 'utf-8'));
|
||||
|
||||
const dependencyIsDirty = Object.keys(packageJson.dependencies || {}).some(
|
||||
(dependencyName) => packageMap[dependencyName]?.isDirty,
|
||||
);
|
||||
|
||||
let newVersion = version;
|
||||
|
||||
if (isDirty || dependencyIsDirty) {
|
||||
switch (releaseType) {
|
||||
case 'experimental':
|
||||
newVersion = generateExperimentalVersion(version);
|
||||
break;
|
||||
case 'premajor':
|
||||
newVersion = semver.inc(
|
||||
version,
|
||||
version.includes('-rc.') ? 'prerelease' : 'premajor',
|
||||
undefined,
|
||||
'rc',
|
||||
)
|
||||
);
|
||||
default:
|
||||
return /** @type {string} */ (semver.inc(version, releaseType));
|
||||
);
|
||||
break;
|
||||
default:
|
||||
newVersion = semver.inc(version, releaseType);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
packageJson.version = packageMap[packageName].nextVersion = newVersion;
|
||||
|
||||
await writeFile(packageFile, JSON.stringify(packageJson, null, 2) + '\n');
|
||||
}
|
||||
|
||||
async function bumpVersions() {
|
||||
const rootDir = process.cwd();
|
||||
|
||||
const releaseType = /** @type { import('semver').ReleaseType | "experimental" } */ (
|
||||
process.env.RELEASE_TYPE
|
||||
);
|
||||
assert.match(releaseType, /^(patch|minor|major|experimental|premajor)$/, 'Invalid RELEASE_TYPE');
|
||||
|
||||
// TODO: if releaseType is `auto` determine release type based on the changelog
|
||||
|
||||
const lastTag = (await exec('git describe --tags --match "n8n@*" --abbrev=0')).stdout.trim();
|
||||
const packages = JSON.parse(
|
||||
(
|
||||
await exec(
|
||||
`pnpm ls -r --only-projects --json | jq -r '[.[] | { name: .name, version: .version, path: .path, private: .private}]'`,
|
||||
)
|
||||
).stdout,
|
||||
);
|
||||
|
||||
/** @type {Record<string, { path: string, isDirty: boolean, version: string, nextVersion?: string }>} */
|
||||
const packageMap = {};
|
||||
for (let { name, path, version, private: isPrivate } of packages) {
|
||||
if (isPrivate && path !== rootDir) {
|
||||
continue;
|
||||
}
|
||||
if (path === rootDir) {
|
||||
name = 'monorepo-root';
|
||||
}
|
||||
|
||||
const isDirty = await exec(`git diff --quiet HEAD ${lastTag} -- ${path}`)
|
||||
.then(() => false)
|
||||
.catch(() => true);
|
||||
|
||||
packageMap[name] = { path, isDirty, version };
|
||||
}
|
||||
|
||||
assert.ok(
|
||||
Object.values(packageMap).some(({ isDirty }) => isDirty),
|
||||
'No changes found since the last release',
|
||||
);
|
||||
|
||||
// Propagate isDirty transitively: if a package's dependency will be bumped,
|
||||
// that package also needs a bump (e.g. design-system → editor-ui → cli).
|
||||
|
||||
// Detect root-level changes that affect resolved dep versions without touching individual
|
||||
// package.json files: pnpm.overrides (applies to all specifiers)
|
||||
// and pnpm-workspace.yaml catalog entries (applies only to deps using a "catalog:…" specifier).
|
||||
|
||||
const rootPkgJson = JSON.parse(await readFile(resolve(rootDir, 'package.json'), 'utf-8'));
|
||||
const rootPkgJsonAtTag = await exec(`git show ${lastTag}:package.json`)
|
||||
.then(({ stdout }) => JSON.parse(stdout))
|
||||
.catch(() => ({}));
|
||||
|
||||
const changedOverrides = computeChangedOverrides(
|
||||
getOverrides(rootPkgJson),
|
||||
getOverrides(rootPkgJsonAtTag),
|
||||
);
|
||||
|
||||
const workspaceYaml = parseWorkspaceYaml(
|
||||
await readFile(resolve(rootDir, 'pnpm-workspace.yaml'), 'utf-8').catch(() => ''),
|
||||
);
|
||||
const workspaceYamlAtTag = parseWorkspaceYaml(
|
||||
await exec(`git show ${lastTag}:pnpm-workspace.yaml`)
|
||||
.then(({ stdout }) => stdout)
|
||||
.catch(() => ''),
|
||||
);
|
||||
const changedCatalogEntries = computeChangedCatalogEntries(
|
||||
getCatalogs(workspaceYaml),
|
||||
getCatalogs(workspaceYamlAtTag),
|
||||
);
|
||||
|
||||
// Store full dep objects (with specifiers) so we can inspect "catalog:…" values below.
|
||||
/** @type {Record<string, Record<string, string>>} */
|
||||
const depsByPackage = {};
|
||||
for (const packageName in packageMap) {
|
||||
const packageFile = resolve(packageMap[packageName].path, 'package.json');
|
||||
const packageJson = JSON.parse(await readFile(packageFile, 'utf-8'));
|
||||
depsByPackage[packageName] = /** @type {Record<string,string>} */ (
|
||||
packageJson.dependencies ?? {}
|
||||
);
|
||||
}
|
||||
|
||||
// Mark packages dirty if any dep had a root-level override or catalog version change.
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, changedOverrides, changedCatalogEntries);
|
||||
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
|
||||
// Keep the monorepo version up to date with the released version
|
||||
packageMap['monorepo-root'].version = packageMap['n8n'].version;
|
||||
|
||||
for (const packageName in packageMap) {
|
||||
const { path, version, isDirty } = packageMap[packageName];
|
||||
const packageFile = resolve(path, 'package.json');
|
||||
const packageJson = JSON.parse(await readFile(packageFile, 'utf-8'));
|
||||
|
||||
const dependencyIsDirty = Object.keys(packageJson.dependencies || {}).some(
|
||||
(dependencyName) => packageMap[dependencyName]?.isDirty,
|
||||
);
|
||||
|
||||
let newVersion = version;
|
||||
|
||||
if (isDirty || dependencyIsDirty) {
|
||||
newVersion = computeNewVersion(version, releaseType);
|
||||
}
|
||||
|
||||
packageJson.version = packageMap[packageName].nextVersion = newVersion;
|
||||
|
||||
await writeFile(packageFile, JSON.stringify(packageJson, null, 2) + '\n');
|
||||
}
|
||||
|
||||
console.log(packageMap['n8n'].nextVersion);
|
||||
}
|
||||
|
||||
// only run when executed directly, not when imported by tests
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
bumpVersions();
|
||||
}
|
||||
console.log(packageMap['n8n'].nextVersion);
|
||||
|
|
|
|||
380
.github/scripts/bump-versions.test.mjs
vendored
380
.github/scripts/bump-versions.test.mjs
vendored
|
|
@ -1,380 +0,0 @@
|
|||
/**
|
||||
* Run these tests with:
|
||||
*
|
||||
* node --test ./.github/scripts/bump-versions.test.mjs
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import {
|
||||
generateExperimentalVersion,
|
||||
getOverrides,
|
||||
parseWorkspaceYaml,
|
||||
getCatalogs,
|
||||
computeChangedOverrides,
|
||||
computeChangedCatalogEntries,
|
||||
markDirtyByRootChanges,
|
||||
propagateDirtyTransitively,
|
||||
computeNewVersion,
|
||||
} from './bump-versions.mjs';
|
||||
|
||||
describe('generateExperimentalVersion', () => {
|
||||
it('creates -exp.0 from a stable version', () => {
|
||||
assert.equal(generateExperimentalVersion('1.2.3'), '1.2.3-exp.0');
|
||||
});
|
||||
|
||||
it('increments exp minor when already at exp.0', () => {
|
||||
assert.equal(generateExperimentalVersion('1.2.3-exp.0'), '1.2.3-exp.1');
|
||||
});
|
||||
|
||||
it('increments exp minor when already at exp.5', () => {
|
||||
assert.equal(generateExperimentalVersion('1.2.3-exp.5'), '1.2.3-exp.6');
|
||||
});
|
||||
|
||||
it('creates -exp.0 from a version with a different pre-release tag', () => {
|
||||
assert.equal(generateExperimentalVersion('1.2.3-beta.1'), '1.2.3-exp.0');
|
||||
});
|
||||
|
||||
it('handles multi-digit version numbers', () => {
|
||||
assert.equal(generateExperimentalVersion('10.20.30'), '10.20.30-exp.0');
|
||||
});
|
||||
|
||||
it('throws on an invalid version string', () => {
|
||||
assert.throws(() => generateExperimentalVersion('not-a-version'), /Invalid version/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getOverrides', () => {
|
||||
it('returns empty object when no overrides exist', () => {
|
||||
assert.deepEqual(getOverrides({}), {});
|
||||
});
|
||||
|
||||
it('returns pnpm.overrides when only pnpm.overrides is set', () => {
|
||||
assert.deepEqual(getOverrides({ pnpm: { overrides: { lodash: '^4.0.0' } } }), {
|
||||
lodash: '^4.0.0',
|
||||
});
|
||||
});
|
||||
|
||||
it('returns overrides when only top-level overrides is set', () => {
|
||||
assert.deepEqual(getOverrides({ overrides: { lodash: '^4.0.0' } }), { lodash: '^4.0.0' });
|
||||
});
|
||||
|
||||
it('merges both fields with top-level overrides taking precedence for the same key', () => {
|
||||
assert.deepEqual(
|
||||
getOverrides({
|
||||
pnpm: { overrides: { lodash: '^3.0.0', underscore: '^1.0.0' } },
|
||||
overrides: { lodash: '^4.0.0' },
|
||||
}),
|
||||
{ lodash: '^4.0.0', underscore: '^1.0.0' },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseWorkspaceYaml', () => {
|
||||
it('parses valid YAML into an object', () => {
|
||||
assert.deepEqual(parseWorkspaceYaml('catalog:\n lodash: "^4.0.0"'), {
|
||||
catalog: { lodash: '^4.0.0' },
|
||||
});
|
||||
});
|
||||
|
||||
it('returns empty object for an empty string', () => {
|
||||
assert.deepEqual(parseWorkspaceYaml(''), {});
|
||||
});
|
||||
|
||||
it('returns empty object for invalid YAML', () => {
|
||||
assert.deepEqual(parseWorkspaceYaml(': - invalid: [yaml}'), {});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCatalogs', () => {
|
||||
it('returns empty map when no catalog or catalogs field exists', () => {
|
||||
assert.equal(getCatalogs({}).size, 0);
|
||||
});
|
||||
|
||||
it('returns a "default" entry for the top-level catalog field', () => {
|
||||
const result = getCatalogs({ catalog: { lodash: '^4.0.0' } });
|
||||
assert.equal(result.size, 1);
|
||||
assert.deepEqual(result.get('default'), { lodash: '^4.0.0' });
|
||||
});
|
||||
|
||||
it('returns named entries from the catalogs field', () => {
|
||||
const result = getCatalogs({ catalogs: { react18: { react: '^18.0.0' } } });
|
||||
assert.equal(result.size, 1);
|
||||
assert.deepEqual(result.get('react18'), { react: '^18.0.0' });
|
||||
});
|
||||
|
||||
it('returns both default and named catalog entries when both fields are present', () => {
|
||||
const result = getCatalogs({
|
||||
catalog: { lodash: '^4.0.0' },
|
||||
catalogs: { react18: { react: '^18.0.0' } },
|
||||
});
|
||||
assert.equal(result.size, 2);
|
||||
assert.deepEqual(result.get('default'), { lodash: '^4.0.0' });
|
||||
assert.deepEqual(result.get('react18'), { react: '^18.0.0' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('computeChangedOverrides', () => {
|
||||
it('returns empty set when nothing changed', () => {
|
||||
assert.equal(computeChangedOverrides({ lodash: '^4' }, { lodash: '^4' }).size, 0);
|
||||
});
|
||||
|
||||
it('detects an added override', () => {
|
||||
const result = computeChangedOverrides({ lodash: '^4' }, {});
|
||||
assert.ok(result.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects a removed override', () => {
|
||||
const result = computeChangedOverrides({}, { lodash: '^4' });
|
||||
assert.ok(result.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects a changed override value', () => {
|
||||
const result = computeChangedOverrides({ lodash: '^4' }, { lodash: '^3' });
|
||||
assert.ok(result.has('lodash'));
|
||||
});
|
||||
|
||||
it('does not include unchanged overrides', () => {
|
||||
const result = computeChangedOverrides(
|
||||
{ lodash: '^4', underscore: '^1' },
|
||||
{ lodash: '^4', underscore: '^1' },
|
||||
);
|
||||
assert.equal(result.size, 0);
|
||||
});
|
||||
|
||||
it('handles mixed changed and unchanged overrides', () => {
|
||||
const result = computeChangedOverrides(
|
||||
{ lodash: '^4', underscore: '^2' },
|
||||
{ lodash: '^4', underscore: '^1' },
|
||||
);
|
||||
assert.equal(result.size, 1);
|
||||
assert.ok(result.has('underscore'));
|
||||
assert.ok(!result.has('lodash'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('computeChangedCatalogEntries', () => {
|
||||
it('returns empty map when nothing changed', () => {
|
||||
const current = new Map([['default', { lodash: '^4' }]]);
|
||||
const previous = new Map([['default', { lodash: '^4' }]]);
|
||||
assert.equal(computeChangedCatalogEntries(current, previous).size, 0);
|
||||
});
|
||||
|
||||
it('detects an added dep in a catalog', () => {
|
||||
const current = new Map([['default', { lodash: '^4' }]]);
|
||||
const previous = new Map([['default', {}]]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('default')?.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects a removed dep from a catalog', () => {
|
||||
const current = new Map([['default', {}]]);
|
||||
const previous = new Map([['default', { lodash: '^4' }]]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('default')?.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects a changed dep version in a catalog', () => {
|
||||
const current = new Map([['default', { lodash: '^4' }]]);
|
||||
const previous = new Map([['default', { lodash: '^3' }]]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('default')?.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects changes in a named catalog', () => {
|
||||
const current = new Map([['react18', { react: '^18' }]]);
|
||||
const previous = new Map([['react18', { react: '^17' }]]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('react18')?.has('react'));
|
||||
});
|
||||
|
||||
it('detects a newly added catalog', () => {
|
||||
const current = new Map([['newCatalog', { lodash: '^4' }]]);
|
||||
const previous = new Map();
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('newCatalog')?.has('lodash'));
|
||||
});
|
||||
|
||||
it('detects a removed catalog', () => {
|
||||
const current = new Map();
|
||||
const previous = new Map([['oldCatalog', { lodash: '^4' }]]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.get('oldCatalog')?.has('lodash'));
|
||||
});
|
||||
|
||||
it('does not include a catalog that has no changed entries', () => {
|
||||
const current = new Map([
|
||||
['default', { lodash: '^4' }],
|
||||
['react18', { react: '^18' }],
|
||||
]);
|
||||
const previous = new Map([
|
||||
['default', { lodash: '^3' }],
|
||||
['react18', { react: '^18' }],
|
||||
]);
|
||||
const result = computeChangedCatalogEntries(current, previous);
|
||||
assert.ok(result.has('default'));
|
||||
assert.ok(!result.has('react18'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('markDirtyByRootChanges', () => {
|
||||
it('marks a package dirty when its dep appears in changedOverrides', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { lodash: '^4' } };
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(['lodash']), new Map());
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('skips already-dirty packages', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: true } };
|
||||
// No deps, but package is already dirty — should not throw or change state
|
||||
const depsByPackage = { 'pkg-a': {} };
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(['lodash']), new Map());
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('marks a package dirty when its dep uses "catalog:" (default catalog) and that entry changed', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { lodash: 'catalog:' } };
|
||||
const changedCatalogEntries = new Map([['default', new Set(['lodash'])]]);
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(), changedCatalogEntries);
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('marks a package dirty when its dep uses "catalog:<name>" and that named catalog entry changed', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { react: 'catalog:react18' } };
|
||||
const changedCatalogEntries = new Map([['react18', new Set(['react'])]]);
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(), changedCatalogEntries);
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('does not mark a package dirty when none of its deps changed', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { lodash: '^4' } };
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(['underscore']), new Map());
|
||||
assert.ok(!packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('does not mark a package dirty when a catalog: dep is in a catalog with no changes', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { lodash: 'catalog:' } };
|
||||
const changedCatalogEntries = new Map([['default', new Set(['underscore'])]]);
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(), changedCatalogEntries);
|
||||
assert.ok(!packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('does not mark a package dirty when a catalog: dep is in a different catalog than the one that changed', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { react: 'catalog:react18' } };
|
||||
const changedCatalogEntries = new Map([['default', new Set(['react'])]]);
|
||||
markDirtyByRootChanges(packageMap, depsByPackage, new Set(), changedCatalogEntries);
|
||||
assert.ok(!packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
});
|
||||
|
||||
describe('propagateDirtyTransitively', () => {
|
||||
it('does nothing when no packages are dirty', () => {
|
||||
const packageMap = {
|
||||
'pkg-a': { isDirty: false },
|
||||
'pkg-b': { isDirty: false },
|
||||
};
|
||||
const depsByPackage = {
|
||||
'pkg-a': { 'pkg-b': 'workspace:*' },
|
||||
'pkg-b': {},
|
||||
};
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
assert.ok(!packageMap['pkg-a'].isDirty);
|
||||
assert.ok(!packageMap['pkg-b'].isDirty);
|
||||
});
|
||||
|
||||
it('propagates dirty state one level up the dependency chain', () => {
|
||||
const packageMap = {
|
||||
'pkg-a': { isDirty: false },
|
||||
'pkg-b': { isDirty: true },
|
||||
};
|
||||
const depsByPackage = {
|
||||
'pkg-a': { 'pkg-b': 'workspace:*' },
|
||||
'pkg-b': {},
|
||||
};
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('propagates dirty state through multiple levels', () => {
|
||||
const packageMap = {
|
||||
'pkg-a': { isDirty: false },
|
||||
'pkg-b': { isDirty: false },
|
||||
'pkg-c': { isDirty: true },
|
||||
};
|
||||
const depsByPackage = {
|
||||
'pkg-a': { 'pkg-b': 'workspace:*' },
|
||||
'pkg-b': { 'pkg-c': 'workspace:*' },
|
||||
'pkg-c': {},
|
||||
};
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
assert.ok(packageMap['pkg-b'].isDirty, 'pkg-b should be dirty (depends on dirty pkg-c)');
|
||||
assert.ok(packageMap['pkg-a'].isDirty, 'pkg-a should be dirty (depends on dirty pkg-b)');
|
||||
});
|
||||
|
||||
it('does not mark packages dirty when their deps are external (not in packageMap)', () => {
|
||||
const packageMap = { 'pkg-a': { isDirty: false } };
|
||||
const depsByPackage = { 'pkg-a': { lodash: '^4' } };
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
assert.ok(!packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
|
||||
it('handles diamond dependency graphs without infinite loops', () => {
|
||||
// pkg-a depends on pkg-b and pkg-c; both depend on pkg-d (dirty)
|
||||
const packageMap = {
|
||||
'pkg-a': { isDirty: false },
|
||||
'pkg-b': { isDirty: false },
|
||||
'pkg-c': { isDirty: false },
|
||||
'pkg-d': { isDirty: true },
|
||||
};
|
||||
const depsByPackage = {
|
||||
'pkg-a': { 'pkg-b': 'workspace:*', 'pkg-c': 'workspace:*' },
|
||||
'pkg-b': { 'pkg-d': 'workspace:*' },
|
||||
'pkg-c': { 'pkg-d': 'workspace:*' },
|
||||
'pkg-d': {},
|
||||
};
|
||||
propagateDirtyTransitively(packageMap, depsByPackage);
|
||||
assert.ok(packageMap['pkg-b'].isDirty);
|
||||
assert.ok(packageMap['pkg-c'].isDirty);
|
||||
assert.ok(packageMap['pkg-a'].isDirty);
|
||||
});
|
||||
});
|
||||
|
||||
describe('computeNewVersion', () => {
|
||||
it('increments patch version', () => {
|
||||
assert.equal(computeNewVersion('1.2.3', 'patch'), '1.2.4');
|
||||
});
|
||||
|
||||
it('increments minor version (resets patch)', () => {
|
||||
assert.equal(computeNewVersion('1.2.3', 'minor'), '1.3.0');
|
||||
});
|
||||
|
||||
it('increments major version (resets minor and patch)', () => {
|
||||
assert.equal(computeNewVersion('1.2.3', 'major'), '2.0.0');
|
||||
});
|
||||
|
||||
it('creates -exp.0 from a stable version for experimental', () => {
|
||||
assert.equal(computeNewVersion('1.2.3', 'experimental'), '1.2.3-exp.0');
|
||||
});
|
||||
|
||||
it('increments exp minor for experimental when already an exp version', () => {
|
||||
assert.equal(computeNewVersion('1.2.3-exp.0', 'experimental'), '1.2.3-exp.1');
|
||||
});
|
||||
|
||||
it('creates a premajor rc version from a stable version', () => {
|
||||
assert.equal(computeNewVersion('1.2.3', 'premajor'), '2.0.0-rc.0');
|
||||
});
|
||||
|
||||
it('increments the rc prerelease number for premajor when already an rc version', () => {
|
||||
assert.equal(computeNewVersion('2.0.0-rc.0', 'premajor'), '2.0.0-rc.1');
|
||||
});
|
||||
|
||||
it('increments rc correctly across multiple premajor calls', () => {
|
||||
assert.equal(computeNewVersion('2.0.0-rc.4', 'premajor'), '2.0.0-rc.5');
|
||||
});
|
||||
});
|
||||
114
.github/scripts/cla/check-signatures.mjs
vendored
114
.github/scripts/cla/check-signatures.mjs
vendored
|
|
@ -1,114 +0,0 @@
|
|||
// Invoked from .github/workflows/ci-cla-check.yml via actions/github-script.
|
||||
//
|
||||
// Collects unique commit authors for the PR (or for the commits a merge
|
||||
// queue is about to land) and asks the n8n CLA service whether each one
|
||||
// has signed. Surfaces three buckets to subsequent steps:
|
||||
// - signed : verified contributors
|
||||
// - unsigned : verified non-contributors (block the merge)
|
||||
// - errored : CLA lookup failed (block the merge — fail-closed so we
|
||||
// never green-light an unverified contribution)
|
||||
//
|
||||
// Commits whose author email is not linked to a GitHub account can't be
|
||||
// looked up by login; they're surfaced separately as `unlinked`.
|
||||
|
||||
/**
|
||||
* @typedef { InstanceType<typeof import("@actions/github/lib/utils").GitHub> } GitHubInstance
|
||||
* @typedef { import("@actions/github/lib/context").Context } Context
|
||||
* @typedef { typeof import("@actions/core") } Core
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{ github: GitHubInstance, context: Context, core: Core }} params
|
||||
*/
|
||||
export default async function checkSignatures ({ github, context, core }) {
|
||||
const { owner, repo } = context.repo;
|
||||
const prNumber = process.env.PR_NUMBER;
|
||||
const headSha = process.env.HEAD_SHA;
|
||||
const baseSha = process.env.BASE_SHA;
|
||||
const isMergeGroup = process.env.IS_MERGE_GROUP === 'true';
|
||||
|
||||
/** @type {Set<string>} */
|
||||
const authors = new Set();
|
||||
/** @type {Array<{sha: string, name: string, email: string}>} */
|
||||
const unlinkedCommits = [];
|
||||
|
||||
/**
|
||||
* @param {Array<any>} commits
|
||||
*/
|
||||
const collect = (commits) => {
|
||||
for (const c of commits) {
|
||||
// Bot-authored commits don't need a CLA; skip before the linked/unlinked split
|
||||
// so they don't fall through to `unlinkedCommits` and fail `all_signed`.
|
||||
if (c.author && c.author.type === 'Bot') continue;
|
||||
|
||||
if (c.author && c.author.login) {
|
||||
authors.add(c.author.login);
|
||||
} else if (c.commit && c.commit.author) {
|
||||
unlinkedCommits.push({
|
||||
sha: c.sha,
|
||||
name: c.commit.author.name,
|
||||
email: c.commit.author.email,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (isMergeGroup) {
|
||||
const { data: comparison } = await github.rest.repos.compareCommitsWithBasehead({
|
||||
owner,
|
||||
repo,
|
||||
basehead: `${baseSha}...${headSha}`,
|
||||
});
|
||||
collect(comparison.commits || []);
|
||||
} else if (prNumber) {
|
||||
const commits = await github.paginate(github.rest.pulls.listCommits, {
|
||||
owner,
|
||||
repo,
|
||||
pull_number: Number(prNumber),
|
||||
per_page: 100,
|
||||
});
|
||||
collect(commits);
|
||||
}
|
||||
|
||||
const loginList = [...authors];
|
||||
core.info(`Contributors to check: ${loginList.join(', ') || '(none)'}`);
|
||||
if (unlinkedCommits.length > 0) {
|
||||
core.warning(
|
||||
`${unlinkedCommits.length} commit(s) have an author email not linked to a GitHub account ` +
|
||||
'and cannot be verified against the CLA service.',
|
||||
);
|
||||
}
|
||||
|
||||
/** @type {string[]} */
|
||||
const signed = [];
|
||||
/** @type {string[]} */
|
||||
const unsigned = [];
|
||||
/** @type {string[]} */
|
||||
const errored = [];
|
||||
|
||||
for (const login of loginList) {
|
||||
const url = `${process.env.CLA_API}?checkContributor=${encodeURIComponent(login)}`;
|
||||
try {
|
||||
const res = await fetch(url);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
if (data && data.isContributor === true) {
|
||||
signed.push(login);
|
||||
} else {
|
||||
unsigned.push(login);
|
||||
}
|
||||
} catch (e) {
|
||||
core.warning(`CLA lookup failed for @${login}: ${e instanceof Error ? e.message : String(e)}`);
|
||||
errored.push(login);
|
||||
}
|
||||
}
|
||||
|
||||
const blocking = [...unsigned, ...errored];
|
||||
const allSigned = blocking.length === 0 && unlinkedCommits.length === 0;
|
||||
|
||||
core.setOutput('signed', signed.join(','));
|
||||
core.setOutput('unsigned', unsigned.join(','));
|
||||
core.setOutput('errored', errored.join(','));
|
||||
core.setOutput('unlinked', JSON.stringify(unlinkedCommits));
|
||||
core.setOutput('all_signed', String(allSigned));
|
||||
}
|
||||
83
.github/scripts/cla/manage-label.mjs
vendored
83
.github/scripts/cla/manage-label.mjs
vendored
|
|
@ -1,83 +0,0 @@
|
|||
// Invoked from .github/workflows/ci-cla-check.yml via actions/github-script.
|
||||
//
|
||||
// Adds the `cla-signed` label when every contributor has signed, and
|
||||
// removes it otherwise. Idempotent: re-runs safely without duplicating
|
||||
// the label or erroring if it's already in the desired state. Creates
|
||||
// the label on first use so the workflow is self-contained.
|
||||
|
||||
/**
|
||||
* @typedef { InstanceType<typeof import("@actions/github/lib/utils").GitHub> } GitHubInstance
|
||||
* @typedef { import("@actions/github/lib/context").Context } Context
|
||||
* @typedef { typeof import("@actions/core") } Core
|
||||
*/
|
||||
|
||||
const LABEL_NAME = 'cla-signed';
|
||||
const LABEL_COLOR = '0e8a16'; // GitHub's standard green
|
||||
const LABEL_DESCRIPTION = 'All contributors on this PR have signed the CLA';
|
||||
|
||||
/**
 * Adds or removes the `cla-signed` label on a PR depending on whether every
 * contributor has signed. Safe to re-run: the label is created on first use,
 * duplicate adds are harmless, and removing an absent label is tolerated.
 *
 * Reads PR_NUMBER and ALL_SIGNED from the environment.
 *
 * @param {{ github: GitHubInstance, context: Context, core: Core }} params
 */
export default async function manageClaLabel({ github, context, core }) {
	const { owner, repo } = context.repo;
	const issue_number = Number(process.env.PR_NUMBER);
	const everyoneSigned = process.env.ALL_SIGNED === 'true';

	if (!everyoneSigned) {
		// Strip the label; a 404 just means it was never applied.
		try {
			await github.rest.issues.removeLabel({ owner, repo, issue_number, name: LABEL_NAME });
			core.info(`Removed "${LABEL_NAME}" label from PR #${issue_number}`);
		} catch (e) {
			if (errorStatus(e) !== 404) throw e;
		}
		return;
	}

	// addLabels errors if the label is missing from the repo, so make sure
	// it exists before applying it.
	const labelExists = await github.rest.issues
		.getLabel({ owner, repo, name: LABEL_NAME })
		.then(() => true)
		.catch((e) => {
			if (errorStatus(e) === 404) return false;
			throw e;
		});

	if (!labelExists) {
		try {
			await github.rest.issues.createLabel({
				owner,
				repo,
				name: LABEL_NAME,
				color: LABEL_COLOR,
				description: LABEL_DESCRIPTION,
			});
		} catch (createErr) {
			// 422 = race with a parallel run that just created it. Fine.
			if (errorStatus(createErr) !== 422) throw createErr;
		}
	}

	await github.rest.issues.addLabels({ owner, repo, issue_number, labels: [LABEL_NAME] });
	core.info(`Applied "${LABEL_NAME}" label to PR #${issue_number}`);
}
|
||||
|
||||
/**
 * Extracts the numeric HTTP status from an Octokit request error, if any.
 *
 * Octokit's request errors carry an HTTP `status` field, but TypeScript
 * sees catch parameters as `unknown`; this narrows safely.
 *
 * @param {unknown} e
 * @returns {number | undefined}
 */
function errorStatus(e) {
	if (typeof e !== 'object' || e === null) {
		return undefined;
	}
	const status = /** @type {{ status?: unknown }} */ (e).status;
	return typeof status === 'number' ? status : undefined;
}
|
||||
66
.github/scripts/cla/post-final-status.mjs
vendored
66
.github/scripts/cla/post-final-status.mjs
vendored
|
|
@ -1,66 +0,0 @@
|
|||
// Invoked from .github/workflows/ci-cla-check.yml via actions/github-script.
|
||||
//
|
||||
// Translates the buckets emitted by check-signatures.mjs into a single
|
||||
// commit status on the head SHA. The status `context` name is what a
|
||||
// repository ruleset gates on; description and target_url are best-effort
|
||||
// human signals.
|
||||
//
|
||||
// State mapping:
|
||||
// - success: every contributor is signed and every commit author is linked
|
||||
// - error : only failures were API lookup errors (transient)
|
||||
// - failure: at least one contributor is verified unsigned, or commits
|
||||
// have author emails not linked to a GitHub account
|
||||
|
||||
/**
|
||||
* @typedef { InstanceType<typeof import("@actions/github/lib/utils").GitHub> } GitHubInstance
|
||||
* @typedef { import("@actions/github/lib/context").Context } Context
|
||||
* @typedef { typeof import("@actions/core") } Core
|
||||
*/
|
||||
|
||||
/**
 * Posts a single commit status on the head SHA summarising the CLA check.
 * The status `context` name is what a repository ruleset gates on;
 * description and target_url are best-effort human signals.
 *
 * Inputs come from the environment: ALL_SIGNED, UNSIGNED, ERRORED, UNLINKED,
 * PR_NUMBER, HEAD_SHA, STATUS_CONTEXT, CLA_SIGN_URL.
 *
 * State mapping:
 * - success: every contributor is signed and every commit author is linked
 * - error  : only failures were API lookup errors (transient)
 * - failure: at least one verified-unsigned contributor, or commits with
 *            author emails not linked to a GitHub account
 *
 * @param {{ github: GitHubInstance, context: Context, core: Core }} params
 */
export default async function postFinalClaStatus({ github, context }) {
	const allSigned = process.env.ALL_SIGNED === 'true';
	const splitNames = (value) => (value ?? '').split(',').filter(Boolean);
	const unsigned = splitNames(process.env.UNSIGNED);
	const errored = splitNames(process.env.ERRORED);
	const unlinked = JSON.parse(process.env.UNLINKED || '[]');

	// "error" (transient) only when lookups failed but nothing is known-bad.
	const onlyTransientFailures =
		errored.length > 0 && unsigned.length === 0 && unlinked.length === 0;

	/** @type {'success' | 'failure' | 'error' | 'pending'} */
	let state;
	let description;
	if (allSigned) {
		state = 'success';
		description = 'All contributors have signed the CLA';
	} else if (onlyTransientFailures) {
		state = 'error';
		description = `Could not verify: ${errored.join(', ')}`;
	} else {
		state = 'failure';
		description = [
			unsigned.length > 0 ? `unsigned: ${unsigned.join(', ')}` : null,
			errored.length > 0 ? `errored: ${errored.join(', ')}` : null,
			unlinked.length > 0 ? `${unlinked.length} unlinked commit(s)` : null,
		]
			.filter(Boolean)
			.join(' | ');
	}

	// GitHub commit status description is capped at 140 chars.
	if (description.length > 140) {
		description = description.slice(0, 137) + '…';
	}

	const prNumber = process.env.PR_NUMBER;
	const target_url = prNumber
		? `${context.payload.repository?.html_url}/pull/${prNumber}`
		: process.env.CLA_SIGN_URL;

	await github.rest.repos.createCommitStatus({
		owner: context.repo.owner,
		repo: context.repo.repo,
		sha: /** @type {string} */ (process.env.HEAD_SHA),
		state,
		context: /** @type {string} */ (process.env.STATUS_CONTEXT),
		description,
		target_url,
	});
}
|
||||
76
.github/scripts/cla/resolve-context.mjs
vendored
76
.github/scripts/cla/resolve-context.mjs
vendored
|
|
@ -1,76 +0,0 @@
|
|||
// Invoked from .github/workflows/ci-cla-check.yml via actions/github-script.
|
||||
//
|
||||
// Reads the triggering event (pull_request_target, issue_comment, or
|
||||
// merge_group) and emits the head/base SHA and PR number that the rest of
|
||||
// the workflow needs. For /cla-check comments, also leaves an "eyes"
|
||||
// reaction so the commenter sees we picked it up.
|
||||
|
||||
/**
|
||||
* @typedef { InstanceType<typeof import("@actions/github/lib/utils").GitHub> } GitHubInstance
|
||||
* @typedef { import("@actions/github/lib/context").Context } Context
|
||||
* @typedef { typeof import("@actions/core") } Core
|
||||
*/
|
||||
|
||||
/**
 * Reads the triggering event (pull_request_target, issue_comment,
 * merge_group, or workflow_dispatch) and emits the head/base SHA and PR
 * number the rest of the workflow needs as step outputs. For /cla-check
 * comments, also leaves an "eyes" reaction so the commenter sees we picked
 * it up.
 *
 * @param {{ github: GitHubInstance, context: Context, core: Core }} params
 */
export default async function resolveClaContext({ github, context, core }) {
	const { owner, repo } = context.repo;

	/** Looks up head/base SHAs for a PR number via the API. */
	const lookupPr = async (pull_number) => {
		const { data } = await github.rest.pulls.get({ owner, repo, pull_number });
		return { head: data.head.sha, base: data.base.sha };
	};

	let prNumber = '';
	let headSha = '';
	let baseSha = '';
	let isMergeGroup = false;

	switch (context.eventName) {
		case 'pull_request_target': {
			const pr = context.payload.pull_request;
			if (pr) {
				prNumber = String(pr.number);
				headSha = pr.head.sha;
				baseSha = pr.base.sha;
			}
			break;
		}
		case 'issue_comment': {
			if (!context.payload.issue) break;
			prNumber = String(context.payload.issue.number);
			({ head: headSha, base: baseSha } = await lookupPr(Number(prNumber)));

			// Acknowledge the command so the commenter sees we received it.
			try {
				await github.rest.reactions.createForIssueComment({
					owner,
					repo,
					comment_id: context.payload.comment?.id || -1,
					content: 'eyes',
				});
			} catch (e) {
				core.info(`Could not react to comment: ${e instanceof Error ? e.message : String(e)}`);
			}
			break;
		}
		case 'merge_group': {
			isMergeGroup = true;
			headSha = context.payload.merge_group.head_sha;
			baseSha = context.payload.merge_group.base_sha;
			break;
		}
		case 'workflow_dispatch': {
			const input = context.payload.inputs?.pr_number;
			if (!input) {
				// Fail fast without emitting outputs — the job cannot proceed.
				core.setFailed('workflow_dispatch requires the pr_number input');
				return;
			}
			prNumber = String(input);
			({ head: headSha, base: baseSha } = await lookupPr(Number(prNumber)));
			break;
		}
	}

	core.setOutput('pr_number', prNumber);
	core.setOutput('head_sha', headSha);
	core.setOutput('base_sha', baseSha);
	core.setOutput('is_merge_group', String(isMergeGroup));
}
|
||||
104
.github/scripts/cla/update-pr-comment.mjs
vendored
104
.github/scripts/cla/update-pr-comment.mjs
vendored
|
|
@ -1,104 +0,0 @@
|
|||
// Invoked from .github/workflows/ci-cla-check.yml via actions/github-script.
|
||||
//
|
||||
// Maintains a single CLA comment per PR, keyed by an HTML marker so the
|
||||
// same comment is edited in place across re-runs instead of spammed.
|
||||
// A clean PR that has never been flagged gets no comment at all — only
|
||||
// PRs that needed a nudge get the eventual "thanks" follow-up.
|
||||
|
||||
/**
|
||||
* @typedef { InstanceType<typeof import("@actions/github/lib/utils").GitHub> } GitHubInstance
|
||||
* @typedef { import("@actions/github/lib/context").Context } Context
|
||||
* @typedef { typeof import("@actions/core") } Core
|
||||
*/
|
||||
|
||||
/**
 * Maintains a single CLA comment per PR, keyed by an HTML marker so the same
 * comment is edited in place across re-runs instead of spammed. A clean PR
 * that has never been flagged gets no comment at all — only PRs that needed
 * a nudge get the eventual "thanks" follow-up.
 *
 * Reads PR_NUMBER, ALL_SIGNED, UNSIGNED, ERRORED, UNLINKED, COMMENT_MARKER,
 * and CLA_SIGN_URL from the environment.
 *
 * @param {{ github: GitHubInstance, context: Context, core: Core }} params
 */
export default async function updatePRComment({ github, context }) {
	const { owner, repo } = context.repo;
	const issue_number = Number(process.env.PR_NUMBER);
	const allSigned = process.env.ALL_SIGNED === 'true';
	// UNSIGNED/ERRORED are comma-separated username lists; filter(Boolean)
	// drops the empty string produced when the env var is empty.
	const unsigned = (process.env.UNSIGNED ?? '').split(',').filter(Boolean);
	const errored = (process.env.ERRORED ?? '').split(',').filter(Boolean);
	// UNLINKED is a JSON array of { sha, name, email } records.
	const unlinked = JSON.parse(process.env.UNLINKED || '[]');
	const MARKER = /** @type {string} */ (process.env.COMMENT_MARKER);

	// Fetch every comment on the PR (paginated) to locate our earlier one.
	const comments = await github.paginate(github.rest.issues.listComments, {
		owner,
		repo,
		issue_number,
		per_page: 100,
	});
	// Only adopt the comment as ours if it's bot-authored — otherwise a user
	// who copies our marker into their own comment would either hijack the
	// thread or make updateComment 403 with insufficient permissions.
	const existing = comments.find(
		(c) => c.body && c.body.includes(MARKER) && c.user && c.user.type === 'Bot',
	);

	let body;
	if (allSigned) {
		// Only leave a "thanks" trail if we already nudged once. Avoids
		// pinging every clean PR with a CLA comment.
		if (!existing) {
			return;
		}

		body = [
			MARKER,
			'✅ **CLA Check passed.** All contributors on this PR have signed the n8n CLA — thank you!',
		].join('\n');
	} else {
		// Build the "signatures required" comment section by section.
		const lines = [MARKER, '## CLA signatures required', ''];
		lines.push(`Thank you for your submission! We really appreciate it.
Like many open source projects, we ask that you sign our [Contributor License Agreement](${process.env.CLA_SIGN_URL}) before we can accept your contribution.`);
		lines.push('');

		if (unsigned.length > 0) {
			lines.push('**Contributors who still need to sign:**');
			for (const u of unsigned) {
				lines.push(`- @${u}`);
			}
			lines.push('');
		}
		if (errored.length > 0) {
			lines.push('**Could not verify (will retry on next push):**');
			for (const u of errored) {
				lines.push(`- @${u}`);
			}
			lines.push('');
		}
		if (unlinked.length > 0) {
			lines.push('**Commits authored by an email not linked to a GitHub account:**');
			for (const c of unlinked) {
				lines.push(`- \`${c.sha.slice(0, 7)}\` — ${c.name} <${c.email}>`);
			}
			lines.push('');
			lines.push(
				'Add the email to your GitHub account ' +
					'([instructions](https://docs.github.com/account-and-profile/setting-up-and-managing-your-personal-account-on-github/managing-email-preferences/adding-an-email-address-to-your-github-account)) ' +
					'or amend the commits to use a linked email, then push again.',
			);
			lines.push('');
		}

		lines.push('Once signed, comment `/cla-check` on this PR to re-run verification.');
		body = lines.join('\n');
	}

	// Edit our existing comment in place when present; otherwise create it.
	if (existing) {
		await github.rest.issues.updateComment({
			owner,
			repo,
			comment_id: existing.id,
			body,
		});
	} else {
		await github.rest.issues.createComment({
			owner,
			repo,
			issue_number,
			body,
		});
	}
}
|
||||
|
|
@ -32,13 +32,6 @@ let prompt;
|
|||
|
||||
if (useRaw) {
|
||||
prompt = task;
|
||||
} else if (task.startsWith('/')) {
|
||||
// Task is a skill invocation (e.g. "/n8n:linear-issue CAT-2820").
|
||||
// Wrap it so the model invokes the Skill tool instead of implementing code.
|
||||
prompt = `# Skill Invocation
|
||||
Invoke the following skill using the Skill tool and follow its instructions.
|
||||
|
||||
${task}`;
|
||||
} else {
|
||||
// List available templates so Claude knows what exists (reads them if needed)
|
||||
const templateDir = '.github/claude-templates';
|
||||
|
|
|
|||
12
.github/scripts/determine-version-info.test.mjs
vendored
12
.github/scripts/determine-version-info.test.mjs
vendored
|
|
@ -104,4 +104,16 @@ describe('determine-tracks', () => {
|
|||
assert.equal(output.release_type, 'rc');
|
||||
assert.equal(output.rc_branch, 'release-candidate/2.10.x');
|
||||
});
|
||||
|
||||
it('Determines correct branches on 1.x', () => {
|
||||
const output = determineTrack('1.123.34');
|
||||
|
||||
assert.equal(output.track, 'v1');
|
||||
assert.equal(output.version, '1.123.34');
|
||||
assert.equal(output.previous_version, '1.123.33');
|
||||
assert.equal(output.bump, 'patch');
|
||||
assert.equal(output.new_stable_version, null);
|
||||
assert.equal(output.release_type, 'stable');
|
||||
assert.equal(output.rc_branch, '1.x');
|
||||
});
|
||||
});
|
||||
|
|
|
|||
26
.github/scripts/docker/docker-tags.mjs
vendored
26
.github/scripts/docker/docker-tags.mjs
vendored
|
|
@ -9,7 +9,7 @@ class TagGenerator {
|
|||
this.githubOutput = process.env.GITHUB_OUTPUT || null;
|
||||
}
|
||||
|
||||
generate({ image, version, platform, includeDockerHub = false, sha = '' }) {
|
||||
generate({ image, version, platform, includeDockerHub = false }) {
|
||||
let imageName = image;
|
||||
let versionSuffix = '';
|
||||
|
||||
|
|
@ -27,21 +27,6 @@ class TagGenerator {
|
|||
};
|
||||
|
||||
tags.all = [...tags.ghcr, ...tags.docker];
|
||||
|
||||
// Generate additional SHA-based tags for immutable references
|
||||
if (sha) {
|
||||
const shaVersion = `${version}-${sha}`;
|
||||
const shaPlatformTag = `${shaVersion}${versionSuffix}${platformSuffix}`;
|
||||
const shaGhcr = [`ghcr.io/${this.githubOwner}/${imageName}:${shaPlatformTag}`];
|
||||
const shaDocker = includeDockerHub
|
||||
? [`${this.dockerUsername}/${imageName}:${shaPlatformTag}`]
|
||||
: [];
|
||||
tags.all = [...tags.all, ...shaGhcr, ...shaDocker];
|
||||
tags.ghcr = [...tags.ghcr, ...shaGhcr];
|
||||
tags.docker = [...tags.docker, ...shaDocker];
|
||||
tags.shaPrimaryTag = shaGhcr[0].replace(/-amd64$|-arm64$/, '');
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
|
|
@ -55,21 +40,18 @@ class TagGenerator {
|
|||
`${prefixStr}docker_tag=${tags.docker[0] || ''}`,
|
||||
`${prefixStr}primary_tag=${primaryTag}`,
|
||||
];
|
||||
if (tags.shaPrimaryTag) {
|
||||
outputs.push(`${prefixStr}sha_primary_tag=${tags.shaPrimaryTag}`);
|
||||
}
|
||||
appendFileSync(this.githubOutput, outputs.join('\n') + '\n');
|
||||
} else {
|
||||
console.log(JSON.stringify(tags, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
generateAll({ version, platform, includeDockerHub = false, sha = '' }) {
|
||||
generateAll({ version, platform, includeDockerHub = false }) {
|
||||
const images = ['n8n', 'runners', 'runners-distroless'];
|
||||
const results = {};
|
||||
|
||||
for (const image of images) {
|
||||
const tags = this.generate({ image, version, platform, includeDockerHub, sha });
|
||||
const tags = this.generate({ image, version, platform, includeDockerHub });
|
||||
const prefix = image.replace('-distroless', '_distroless');
|
||||
results[prefix] = tags;
|
||||
|
||||
|
|
@ -104,7 +86,6 @@ if (import.meta.url === `file://${process.argv[1]}`) {
|
|||
version,
|
||||
platform: getArg('platform'),
|
||||
includeDockerHub: hasFlag('include-docker'),
|
||||
sha: getArg('sha') || '',
|
||||
});
|
||||
if (!generator.githubOutput) {
|
||||
console.log(JSON.stringify(results, null, 2));
|
||||
|
|
@ -120,7 +101,6 @@ if (import.meta.url === `file://${process.argv[1]}`) {
|
|||
version,
|
||||
platform: getArg('platform'),
|
||||
includeDockerHub: hasFlag('include-docker'),
|
||||
sha: getArg('sha') || '',
|
||||
});
|
||||
generator.output(tags);
|
||||
}
|
||||
|
|
|
|||
3
.github/scripts/package.json
vendored
3
.github/scripts/package.json
vendored
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "workflow-scripts",
|
||||
"scripts": {
|
||||
"test": "node --test --experimental-test-module-mocks ./*.test.mjs ./quality/*.test.mjs"
|
||||
"test": "node --test --experimental-test-module-mocks ./*.test.mjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/github": "9.0.0",
|
||||
|
|
@ -9,7 +9,6 @@
|
|||
"conventional-changelog": "7.2.0",
|
||||
"debug": "4.4.3",
|
||||
"glob": "13.0.6",
|
||||
"minimatch": "10.2.4",
|
||||
"semver": "7.7.4",
|
||||
"tempfile": "6.0.1",
|
||||
"yaml": "^2.8.3"
|
||||
|
|
|
|||
3
.github/scripts/pnpm-lock.yaml
vendored
3
.github/scripts/pnpm-lock.yaml
vendored
|
|
@ -23,9 +23,6 @@ importers:
|
|||
glob:
|
||||
specifier: 13.0.6
|
||||
version: 13.0.6
|
||||
minimatch:
|
||||
specifier: 10.2.4
|
||||
version: 10.2.4
|
||||
semver:
|
||||
specifier: 7.7.4
|
||||
version: 7.7.4
|
||||
|
|
|
|||
25
.github/scripts/pnpm-utils.mjs
vendored
25
.github/scripts/pnpm-utils.mjs
vendored
|
|
@ -1,25 +0,0 @@
|
|||
import child_process from 'child_process';
|
||||
import { promisify } from 'node:util';
|
||||
|
||||
const exec = promisify(child_process.exec);
|
||||
|
||||
/**
|
||||
* @typedef PnpmPackage
|
||||
* @property { string } name
|
||||
* @property { string } version
|
||||
* @property { string } path
|
||||
* @property { boolean } private
|
||||
* */
|
||||
|
||||
/**
 * Lists every project in the pnpm workspace.
 *
 * Shells out to `pnpm ls -r --only-projects --json` and reduces each entry
 * to the fields we need in JavaScript, so no external `jq` binary is
 * required (the previous implementation piped through `jq`, which is not
 * installed on every runner).
 *
 * @returns { Promise<PnpmPackage[]> }
 */
export async function getMonorepoProjects() {
	const { stdout } = await exec('pnpm ls -r --only-projects --json');
	/** @type {Array<Record<string, any>>} */
	const projects = JSON.parse(stdout);
	// Keep only the PnpmPackage fields; missing fields become `undefined`
	// (jq produced `null` — NOTE(review): no caller is visible here that
	// distinguishes the two, but confirm before relying on it).
	return projects.map(({ name, version, path, private: isPrivate }) => ({
		name,
		version,
		path,
		private: isPrivate,
	}));
}
||||
143
.github/scripts/post-qa-metrics-comment.mjs
vendored
143
.github/scripts/post-qa-metrics-comment.mjs
vendored
|
|
@ -1,143 +0,0 @@
|
|||
#!/usr/bin/env node
/**
 * Fetches QA metric comparisons and posts/updates a PR comment.
 *
 * Usage:
 *   node .github/scripts/post-qa-metrics-comment.mjs --metrics memory-heap-used-baseline
 *   node .github/scripts/post-qa-metrics-comment.mjs --metrics memory-heap-used-baseline --pr 27880 --dry-run
 *
 * Env:
 *   QA_METRICS_COMMENT_WEBHOOK_URL - n8n workflow webhook (required)
 *   QA_METRICS_WEBHOOK_USER/PASSWORD - Basic auth for webhook
 *   GITHUB_TOKEN - For posting comments (not needed with --dry-run)
 *   GITHUB_REF, GITHUB_REPOSITORY, GITHUB_SHA - Auto-set in CI
 */

import { parseArgs } from 'node:util';

// Hidden marker so re-runs update the same comment instead of adding new ones.
const MARKER = '<!-- n8n-qa-metrics-comparison -->';

const { values } = parseArgs({
	options: {
		metrics: { type: 'string' },
		pr: { type: 'string' },
		'baseline-days': { type: 'string', default: '14' },
		'dry-run': { type: 'boolean', default: false },
	},
	strict: true,
});

// --metrics is a comma-separated list of metric names.
const metrics = values.metrics?.split(',').map((m) => m.trim());
if (!metrics?.length) {
	console.error('--metrics is required (comma-separated metric names)');
	process.exit(1);
}

// PR number: explicit --pr flag, else inferred from GITHUB_REF (refs/pull/<n>).
const pr = parseInt(values.pr ?? inferPr(), 10);
if (!pr) {
	console.error('--pr is required (or set GITHUB_REF)');
	process.exit(1);
}

const webhookUrl = process.env.QA_METRICS_COMMENT_WEBHOOK_URL;
if (!webhookUrl) {
	console.error('QA_METRICS_COMMENT_WEBHOOK_URL is required');
	process.exit(1);
}

const repo = process.env.GITHUB_REPOSITORY ?? 'n8n-io/n8n';
const sha = process.env.GITHUB_SHA?.slice(0, 8) ?? '';
const baselineDays = parseInt(values['baseline-days'], 10);

// --- Fetch ---

const headers = { 'Content-Type': 'application/json' };
const user = process.env.QA_METRICS_WEBHOOK_USER;
const pass = process.env.QA_METRICS_WEBHOOK_PASSWORD;
if (user && pass) {
	headers.Authorization = `Basic ${Buffer.from(`${user}:${pass}`).toString('base64')}`;
}

console.log(`PR #${pr}: fetching ${metrics.join(', ')} (${baselineDays}-day baseline)`);

let res;
try {
	res = await fetch(webhookUrl, {
		method: 'POST',
		headers,
		body: JSON.stringify({
			pr_number: pr,
			github_repo: repo,
			git_sha: sha,
			baseline_days: baselineDays,
			metric_names: metrics,
		}),
		// Don't let a hung webhook stall the CI job indefinitely.
		signal: AbortSignal.timeout(60_000),
	});
} catch (err) {
	// Metrics comments are best-effort: a down webhook must not fail CI.
	console.warn(`Webhook unreachable, skipping metrics comment: ${err.message}`);
	process.exit(0);
}

if (!res.ok) {
	// Also best-effort: log and exit 0 rather than failing the workflow.
	const text = await res.text().catch(() => '');
	console.warn(`Webhook failed: ${res.status} ${res.statusText}\n${text}`);
	console.warn('Skipping metrics comment.');
	process.exit(0);
}

const { markdown, has_data } = await res.json();

if (!has_data || !markdown) {
	console.log('No metric data available, skipping.');
	process.exit(0);
}

if (values['dry-run']) {
	// Print the rendered comment instead of posting it.
	console.log('\n--- DRY RUN ---\n');
	console.log(markdown);
	process.exit(0);
}

// --- Post comment ---

const token = process.env.GITHUB_TOKEN;
if (!token) {
	console.error('GITHUB_TOKEN is required to post comments');
	process.exit(1);
}

const [owner, repoName] = repo.split('/');
const ghHeaders = {
	Accept: 'application/vnd.github+json',
	Authorization: `Bearer ${token}`,
	'Content-Type': 'application/json',
};

// Look for an earlier comment carrying our marker so we edit in place.
// NOTE(review): only the first 100 comments are checked (no pagination).
const comments = await fetch(
	`https://api.github.com/repos/${owner}/${repoName}/issues/${pr}/comments?per_page=100`,
	{ headers: ghHeaders },
).then((r) => r.json());

const existing = Array.isArray(comments)
	? comments.find((c) => c.body?.includes(MARKER))
	: null;

if (existing) {
	await fetch(
		`https://api.github.com/repos/${owner}/${repoName}/issues/comments/${existing.id}`,
		{ method: 'PATCH', headers: ghHeaders, body: JSON.stringify({ body: markdown }) },
	);
	console.log(`Updated comment ${existing.id}`);
} else {
	const created = await fetch(
		`https://api.github.com/repos/${owner}/${repoName}/issues/${pr}/comments`,
		{ method: 'POST', headers: ghHeaders, body: JSON.stringify({ body: markdown }) },
	).then((r) => r.json());
	console.log(`Created comment ${created.id}`);
}

/**
 * Infers the PR number from GITHUB_REF (`refs/pull/<n>/...`), if set.
 * @returns {string | undefined}
 */
function inferPr() {
	const match = (process.env.GITHUB_REF ?? '').match(/refs\/pull\/(\d+)/);
	return match?.[1];
}
|
||||
|
|
@ -1,81 +0,0 @@
|
|||
/**
|
||||
* Checks that the PR description contains a checked ownership acknowledgement checkbox.
|
||||
*
|
||||
* Exit codes:
|
||||
* 0 – Checkbox is present and checked
|
||||
* 1 – Checkbox is missing or unchecked
|
||||
*/
|
||||
|
||||
import { initGithub, getEventFromGithubEventPath } from '../github-helpers.mjs';
|
||||
|
||||
const BOT_MARKER = '<!-- pr-ownership-check -->';
|
||||
|
||||
/**
 * Returns true if the PR body contains a checked ownership acknowledgement
 * checkbox.
 *
 * Matches `[x]` (any case, per the `i` flag) followed by the exact
 * acknowledgement sentence, tolerating flexible whitespace after each comma.
 *
 * @param {string | null | undefined} body
 * @returns {boolean}
 */
export function isOwnershipCheckboxChecked(body) {
	const checkedAcknowledgement =
		/\[x\]\s+I have seen this code,\s+I have run this code,\s+and I take responsibility for this code/i;
	return checkedAcknowledgement.test(body ?? '');
}
|
||||
|
||||
/**
 * Entry point: enforces the ownership acknowledgement checkbox on the PR.
 *
 * Posts (or updates) a marker-keyed bot comment and exits 1 when the
 * checkbox is missing/unchecked; deletes the bot comment and exits 0 once
 * the checkbox is checked.
 */
async function main() {
	const event = getEventFromGithubEventPath();
	const pr = event.pull_request;
	const { octokit, owner, repo } = initGithub();

	// NOTE(review): only the first 100 comments are fetched (no pagination);
	// the bot comment could be missed on a very long thread — confirm
	// whether that is acceptable.
	const { data: comments } = await octokit.rest.issues.listComments({
		owner,
		repo,
		issue_number: pr.number,
		per_page: 100,
	});
	// `body` may be absent/null on a comment payload; the previous
	// `c.body.includes(...)` would throw a TypeError in that case. Use
	// optional chaining, matching the sibling check-pr-size script.
	const botComment = comments.find((c) => c.body?.includes(BOT_MARKER));

	if (!isOwnershipCheckboxChecked(pr.body)) {
		const message = [
			BOT_MARKER,
			'## ⚠️ Ownership acknowledgement required',
			'',
			'Please add or check the following item in your PR description before this can be merged:',
			'',
			'```',
			'- [x] I have seen this code, I have run this code, and I take responsibility for this code.',
			'```',
		].join('\n');

		// Edit our existing comment in place rather than spamming new ones.
		if (botComment) {
			await octokit.rest.issues.updateComment({
				owner,
				repo,
				comment_id: botComment.id,
				body: message,
			});
		} else {
			await octokit.rest.issues.createComment({
				owner,
				repo,
				issue_number: pr.number,
				body: message,
			});
		}

		console.log(
			'::error::Ownership checkbox is not checked. Add it to your PR description and check it.',
		);
		process.exit(1);
	} else if (botComment) {
		// Checkbox is now checked: remove the stale nudge comment.
		await octokit.rest.issues.deleteComment({
			owner,
			repo,
			comment_id: botComment.id,
		});
	}
}

// Run only when executed directly (not when imported by tests).
if (import.meta.url === `file://${process.argv[1]}`) {
	await main();
}
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
import { describe, it, before, mock } from 'node:test';
import assert from 'node:assert/strict';

/**
 * Tests for the ownership-checkbox matcher.
 *
 * Run with:
 *   node --test --experimental-test-module-mocks .github/scripts/quality/check-ownership-checkbox.test.mjs
 */

// Stub the GitHub helpers so importing the module under test performs no
// API setup; only the pure matcher is exercised here.
mock.module('../github-helpers.mjs', {
	namedExports: {
		initGithub: () => {},
		getEventFromGithubEventPath: () => {},
	},
});

let isOwnershipCheckboxChecked;
before(async () => {
	// Dynamic import so the mock above is registered first.
	({ isOwnershipCheckboxChecked } = await import('./check-ownership-checkbox.mjs'));
});

describe('isOwnershipCheckboxChecked', () => {
	it('returns true for a checked checkbox with exact text', () => {
		const body =
			'- [x] I have seen this code, I have run this code, and I take responsibility for this code.';
		assert.ok(isOwnershipCheckboxChecked(body));
	});

	it('returns true for uppercase [X]', () => {
		const body =
			'- [X] I have seen this code, I have run this code, and I take responsibility for this code.';
		assert.ok(isOwnershipCheckboxChecked(body));
	});

	it('returns false for an unchecked checkbox [ ]', () => {
		const body =
			'- [ ] I have seen this code, I have run this code, and I take responsibility for this code.';
		assert.equal(isOwnershipCheckboxChecked(body), false);
	});

	it('returns false when the checkbox is absent', () => {
		const body = '## Summary\n\nThis PR does some things.\n';
		assert.equal(isOwnershipCheckboxChecked(body), false);
	});

	it('returns false for null body', () => {
		assert.equal(isOwnershipCheckboxChecked(null), false);
	});

	it('returns false for undefined body', () => {
		assert.equal(isOwnershipCheckboxChecked(undefined), false);
	});

	it('returns false for empty body', () => {
		assert.equal(isOwnershipCheckboxChecked(''), false);
	});

	it('returns true when checkbox appears among other content', () => {
		const body = [
			'## Summary',
			'',
			'Some description here.',
			'',
			'## Checklist',
			'- [x] Tests included',
			'- [x] I have seen this code, I have run this code, and I take responsibility for this code.',
			'- [ ] Docs updated',
		].join('\n');
		assert.ok(isOwnershipCheckboxChecked(body));
	});

	it('returns false when only other checkboxes are checked', () => {
		const body = [
			'- [x] Tests included',
			'- [x] Docs updated',
			'- [ ] I have seen this code, I have run this code, and I take responsibility for this code.',
		].join('\n');
		assert.equal(isOwnershipCheckboxChecked(body), false);
	});

	it('is case-insensitive for the checkbox marker', () => {
		const lower =
			'- [x] i have seen this code, i have run this code, and i take responsibility for this code.';
		assert.ok(isOwnershipCheckboxChecked(lower));
	});
});
|
||||
174
.github/scripts/quality/check-pr-size.mjs
vendored
174
.github/scripts/quality/check-pr-size.mjs
vendored
|
|
@ -1,174 +0,0 @@
|
|||
/**
|
||||
* Checks that the PR does not exceed the line addition limit.
|
||||
*
|
||||
* Files matching any pattern in EXCLUDE_PATTERNS are not counted toward the
|
||||
* limit (e.g. test files, snapshots).
|
||||
*
|
||||
* A maintainer (write access or above) can override by commenting `/size-limit-override`
|
||||
* on the PR. The override takes effect on the next pull_request event (push, reopen, etc.).
|
||||
*
|
||||
* Exit codes:
|
||||
* 0 – PR is within the limit, or a valid override comment exists
|
||||
* 1 – PR exceeds the limit with no valid override
|
||||
*/
|
||||
|
||||
import { minimatch } from 'minimatch';
|
||||
import { initGithub, getEventFromGithubEventPath } from '../github-helpers.mjs';
|
||||
|
||||
// Hard cap on countable added lines before the check fails.
export const SIZE_LIMIT = 1000;
// Maintainers comment this on the PR to bypass the limit.
export const OVERRIDE_COMMAND = '/size-limit-override';

// Files matching any of these globs are not counted toward the limit.
export const EXCLUDE_PATTERNS = [
	// Test files (by extension)
	'**/*.test.ts',
	'**/*.test.js',
	'**/*.test.mjs',
	'**/*.spec.ts',
	'**/*.spec.js',
	'**/*.spec.mjs',
	// Test directories
	'**/test/**',
	'**/tests/**',
	'**/__tests__/**',
	// Snapshots
	'**/__snapshots__/**',
	'**/*.snap',
	// Fixtures and mocks
	'**/fixtures/**',
	'**/__mocks__/**',
	// Dedicated testing package
	'packages/testing/**',
	// Lock file (can produce massive diffs on dependency changes)
	'pnpm-lock.yaml',
	// Documentation
	'**/*.md',
	'**/*.mdx'
];

// Hidden marker so the bot can find and update its own PR comment.
const BOT_MARKER = '<!-- pr-size-check -->';
|
||||
|
||||
/**
 * Returns true if any comment in the list is a valid `/size-limit-override`
 * from a user with write access or above.
 *
 * Note: a matching override comment with no author short-circuits to false,
 * and permissions are only looked up for comments that start with the
 * override command.
 *
 * @param {Array<{ body?: string, user: { login: string } | null }>} comments
 * @param {(username: string) => Promise<string>} getPermission - returns the permission level string
 * @returns {Promise<boolean>}
 */
export async function hasValidOverride(comments, getPermission) {
	const allowedLevels = ['admin', 'write', 'maintain'];

	for (const { body, user } of comments) {
		if (!body?.startsWith(OVERRIDE_COMMAND)) {
			continue;
		}
		if (!user) {
			return false;
		}
		const level = await getPermission(user.login);
		if (allowedLevels.includes(level)) {
			return true;
		}
	}

	return false;
}
|
||||
|
||||
/**
 * Returns the total additions across all files, excluding those matching any
 * exclude pattern.
 *
 * @param {Array<{ filename: string, additions: number }>} files
 * @param {string[]} excludePatterns
 * @returns {number}
 */
export function countFilteredAdditions(files, excludePatterns) {
	let total = 0;
	for (const { filename, additions } of files) {
		const isExcluded = excludePatterns.some((pattern) => minimatch(filename, pattern));
		if (!isExcluded) {
			total += additions;
		}
	}
	return total;
}
|
||||
|
||||
/**
 * Entry point: fails the check when countable additions exceed SIZE_LIMIT
 * and no valid maintainer override comment exists. Posts/updates a
 * marker-keyed PR comment explaining the failure, and deletes it once the
 * PR passes (or is overridden).
 */
async function main() {
	const event = getEventFromGithubEventPath();
	const pr = event.pull_request;
	const { octokit, owner, repo } = initGithub();

	// All changed files, paginated — large PRs exceed a single page.
	const files = await octokit.paginate(octokit.rest.pulls.listFiles, {
		owner,
		repo,
		pull_number: pr.number,
		per_page: 100,
	});

	const additions = countFilteredAdditions(files, EXCLUDE_PATTERNS);

	// NOTE(review): only the first 100 comments are fetched (no pagination);
	// an override on a very long thread could be missed — confirm acceptable.
	const { data: comments } = await octokit.rest.issues.listComments({
		owner,
		repo,
		issue_number: pr.number,
		per_page: 100,
		sort: 'created',
		direction: 'desc',
	});

	// Permission lookup is done lazily per override comment.
	const overrideFound = await hasValidOverride(comments, async (username) => {
		const { data: perm } = await octokit.rest.repos.getCollaboratorPermissionLevel({
			owner,
			repo,
			username,
		});
		return perm.permission;
	});

	const botComment = comments.find((c) => c.body?.includes(BOT_MARKER));

	if (additions > SIZE_LIMIT && !overrideFound) {
		const message = [
			BOT_MARKER,
			`## ! PR exceeds size limit (${additions.toLocaleString()} lines added)`,
			'',
			`This PR adds **${additions.toLocaleString()} lines**, exceeding the ${SIZE_LIMIT.toLocaleString()}-line limit (test files excluded).`,
			'',
			'Large PRs are harder to review and increase the risk of bugs going unnoticed. Please consider:',
			'- Breaking this into smaller, logically separate PRs',
			'- Moving unrelated changes to a follow-up PR',
			'',
			`If the size is genuinely justified (e.g. generated code, large migrations, test fixtures), a maintainer can override by commenting \`${OVERRIDE_COMMAND}\` and then pushing a new commit or re-running this check.`,
		].join('\n');

		// Edit our existing comment in place rather than spamming new ones.
		if (botComment) {
			await octokit.rest.issues.updateComment({
				owner,
				repo,
				comment_id: botComment.id,
				body: message,
			});
		} else {
			await octokit.rest.issues.createComment({
				owner,
				repo,
				issue_number: pr.number,
				body: message,
			});
		}

		console.log(
			`::error::PR adds ${additions.toLocaleString()} lines (test files excluded), exceeding the ${SIZE_LIMIT.toLocaleString()}-line limit. Reduce PR size or ask a maintainer to comment \`${OVERRIDE_COMMAND}\`.`,
		);
		process.exit(1);
	} else {
		// Passing (or overridden): clean up any stale failure comment.
		if (botComment) {
			await octokit.rest.issues.deleteComment({
				owner,
				repo,
				comment_id: botComment.id,
			});
		}
		if (overrideFound && additions > SIZE_LIMIT) {
			console.log(
				`PR size limit overridden. ${additions.toLocaleString()} lines added (limit: ${SIZE_LIMIT.toLocaleString()}, test files excluded).`,
			);
		}
	}
}

// Run only when executed directly (not when imported by tests).
if (import.meta.url === `file://${process.argv[1]}`) {
	await main();
}
|
||||
215
.github/scripts/quality/check-pr-size.test.mjs
vendored
215
.github/scripts/quality/check-pr-size.test.mjs
vendored
|
|
@ -1,215 +0,0 @@
|
|||
import { describe, it, before, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
/**
|
||||
* Run with:
|
||||
* node --test --experimental-test-module-mocks .github/scripts/quality/check-pr-size.test.mjs
|
||||
*/
|
||||
|
||||
mock.module('../github-helpers.mjs', {
|
||||
namedExports: {
|
||||
initGithub: () => {},
|
||||
getEventFromGithubEventPath: () => {},
|
||||
},
|
||||
});
|
||||
|
||||
let hasValidOverride, countFilteredAdditions, SIZE_LIMIT, OVERRIDE_COMMAND, EXCLUDE_PATTERNS;
|
||||
before(async () => {
|
||||
({ hasValidOverride, countFilteredAdditions, SIZE_LIMIT, OVERRIDE_COMMAND, EXCLUDE_PATTERNS } =
|
||||
await import('./check-pr-size.mjs'));
|
||||
});
|
||||
|
||||
/** @param {string} permission */
|
||||
const permissionGetter = (permission) => async (_username) => permission;
|
||||
|
||||
describe('SIZE_LIMIT', () => {
|
||||
it('is 1000', () => {
|
||||
assert.equal(SIZE_LIMIT, 1000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasValidOverride', () => {
|
||||
it('returns false when there are no comments', async () => {
|
||||
const result = await hasValidOverride([], permissionGetter('write'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('returns false when no comment starts with the override command', async () => {
|
||||
const comments = [
|
||||
{ body: 'Looks good to me!', user: { login: 'reviewer' } },
|
||||
{ body: 'Please split this PR.', user: { login: 'maintainer' } },
|
||||
];
|
||||
const result = await hasValidOverride(comments, permissionGetter('write'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('returns true when a write-access user has posted the override command', async () => {
|
||||
const comments = [{ body: OVERRIDE_COMMAND, user: { login: 'maintainer' } }];
|
||||
const result = await hasValidOverride(comments, permissionGetter('write'));
|
||||
assert.ok(result);
|
||||
});
|
||||
|
||||
it('returns true for maintain permission', async () => {
|
||||
const comments = [{ body: OVERRIDE_COMMAND, user: { login: 'lead' } }];
|
||||
const result = await hasValidOverride(comments, permissionGetter('maintain'));
|
||||
assert.ok(result);
|
||||
});
|
||||
|
||||
it('returns true for admin permission', async () => {
|
||||
const comments = [{ body: OVERRIDE_COMMAND, user: { login: 'admin' } }];
|
||||
const result = await hasValidOverride(comments, permissionGetter('admin'));
|
||||
assert.ok(result);
|
||||
});
|
||||
|
||||
it('returns false when the override commenter only has read access', async () => {
|
||||
const comments = [{ body: OVERRIDE_COMMAND, user: { login: 'outsider' } }];
|
||||
const result = await hasValidOverride(comments, permissionGetter('read'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('returns false when the override commenter only has triage access', async () => {
|
||||
const comments = [{ body: OVERRIDE_COMMAND, user: { login: 'triager' } }];
|
||||
const result = await hasValidOverride(comments, permissionGetter('triage'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('returns false when the override command appears mid-comment, not at the start', async () => {
|
||||
const comments = [
|
||||
{
|
||||
body: `Please note: ${OVERRIDE_COMMAND} should only be used when justified.`,
|
||||
user: { login: 'maintainer' },
|
||||
},
|
||||
];
|
||||
const result = await hasValidOverride(comments, permissionGetter('write'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('returns true when one of several comments is a valid override', async () => {
|
||||
const comments = [
|
||||
{ body: 'Looks good!', user: { login: 'reviewer' } },
|
||||
{ body: OVERRIDE_COMMAND, user: { login: 'maintainer' } },
|
||||
{ body: 'Please add tests.', user: { login: 'other' } },
|
||||
];
|
||||
const result = await hasValidOverride(comments, permissionGetter('write'));
|
||||
assert.ok(result);
|
||||
});
|
||||
|
||||
it('returns false when override comment exists but all posters lack write access', async () => {
|
||||
const comments = [
|
||||
{ body: OVERRIDE_COMMAND, user: { login: 'user1' } },
|
||||
{ body: OVERRIDE_COMMAND, user: { login: 'user2' } },
|
||||
];
|
||||
const result = await hasValidOverride(comments, permissionGetter('read'));
|
||||
assert.equal(result, false);
|
||||
});
|
||||
|
||||
it('checks permissions per commenter independently', async () => {
|
||||
const permissions = { writer: 'write', reader: 'read' };
|
||||
const getPermission = async (username) => permissions[username] ?? 'read';
|
||||
|
||||
const comments = [
|
||||
{ body: OVERRIDE_COMMAND, user: { login: 'reader' } },
|
||||
{ body: OVERRIDE_COMMAND, user: { login: 'writer' } },
|
||||
];
|
||||
const result = await hasValidOverride(comments, getPermission);
|
||||
assert.ok(result);
|
||||
});
|
||||
});
|
||||
|
||||
describe('countFilteredAdditions', () => {
|
||||
it('sums additions across all files when no patterns are given', () => {
|
||||
const files = [
|
||||
{ filename: 'src/foo.ts', additions: 100 },
|
||||
{ filename: 'src/bar.ts', additions: 200 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, []), 300);
|
||||
});
|
||||
|
||||
it('excludes files matching a glob pattern', () => {
|
||||
const files = [
|
||||
{ filename: 'src/foo.ts', additions: 100 },
|
||||
{ filename: 'src/foo.test.ts', additions: 500 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, ['**/*.test.ts']), 100);
|
||||
});
|
||||
|
||||
it('excludes files matching any of multiple patterns', () => {
|
||||
const files = [
|
||||
{ filename: 'src/foo.ts', additions: 100 },
|
||||
{ filename: 'src/foo.test.ts', additions: 200 },
|
||||
{ filename: 'src/foo.spec.ts', additions: 300 },
|
||||
{ filename: 'src/__tests__/bar.ts', additions: 400 },
|
||||
];
|
||||
assert.equal(
|
||||
countFilteredAdditions(files, ['**/*.test.ts', '**/*.spec.ts', '**/__tests__/**']),
|
||||
100,
|
||||
);
|
||||
});
|
||||
|
||||
it('returns 0 when all files are excluded', () => {
|
||||
const files = [
|
||||
{ filename: 'src/foo.test.ts', additions: 100 },
|
||||
{ filename: 'src/bar.test.ts', additions: 200 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, ['**/*.test.ts']), 0);
|
||||
});
|
||||
|
||||
it('returns 0 for an empty file list', () => {
|
||||
assert.equal(countFilteredAdditions([], EXCLUDE_PATTERNS), 0);
|
||||
});
|
||||
|
||||
it('applies EXCLUDE_PATTERNS to common test file extensions', () => {
|
||||
const files = [
|
||||
{ filename: 'src/service.ts', additions: 50 },
|
||||
{ filename: 'src/service.test.ts', additions: 100 },
|
||||
{ filename: 'src/service.spec.ts', additions: 100 },
|
||||
{ filename: 'src/service.test.mjs', additions: 100 },
|
||||
{ filename: 'src/service.spec.mjs', additions: 100 },
|
||||
{ filename: 'src/service.test.js', additions: 100 },
|
||||
{ filename: 'src/service.spec.js', additions: 100 },
|
||||
{ filename: 'src/__tests__/helper.ts', additions: 100 },
|
||||
{ filename: 'src/component.snap', additions: 100 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, EXCLUDE_PATTERNS), 50);
|
||||
});
|
||||
|
||||
it('applies EXCLUDE_PATTERNS to test directories (test/, tests/, __tests__)', () => {
|
||||
const files = [
|
||||
{ filename: 'packages/cli/src/service.ts', additions: 50 },
|
||||
{ filename: 'packages/cli/test/unit/service.test.ts', additions: 100 },
|
||||
{ filename: 'packages/cli/test/integration/api.test.ts', additions: 100 },
|
||||
{ filename: 'packages/nodes-base/nodes/Foo/tests/Foo.test.ts', additions: 100 },
|
||||
{ filename: 'packages/core/src/__tests__/cipher.test.ts', additions: 100 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, EXCLUDE_PATTERNS), 50);
|
||||
});
|
||||
|
||||
it('applies EXCLUDE_PATTERNS to snapshots, fixtures, and mocks', () => {
|
||||
const files = [
|
||||
{ filename: 'packages/cli/src/service.ts', additions: 50 },
|
||||
{ filename: 'packages/editor-ui/src/__snapshots__/Canvas.test.ts.snap', additions: 100 },
|
||||
{ filename: 'packages/workflow/test/fixtures/workflow.json', additions: 100 },
|
||||
{ filename: 'packages/core/src/__mocks__/fs.ts', additions: 100 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, EXCLUDE_PATTERNS), 50);
|
||||
});
|
||||
|
||||
it('applies EXCLUDE_PATTERNS to packages/testing and pnpm-lock.yaml', () => {
|
||||
const files = [
|
||||
{ filename: 'packages/cli/src/service.ts', additions: 50 },
|
||||
{ filename: 'packages/testing/playwright/tests/workflow.spec.ts', additions: 100 },
|
||||
{ filename: 'packages/testing/playwright/pages/CanvasPage.ts', additions: 100 },
|
||||
{ filename: 'pnpm-lock.yaml', additions: 500 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, EXCLUDE_PATTERNS), 50);
|
||||
});
|
||||
|
||||
it('applies EXCLUDE_PATTERNS to markdown files', () => {
|
||||
const files = [
|
||||
{ filename: 'packages/cli/src/service.ts', additions: 50 },
|
||||
{ filename: 'packages/cli/AGENTS.md', additions: 100 },
|
||||
{ filename: 'packages/frontend/STORIES.mdx', additions: 100 },
|
||||
];
|
||||
assert.equal(countFilteredAdditions(files, EXCLUDE_PATTERNS), 50);
|
||||
});
|
||||
});
|
||||
97
.github/scripts/quality/handle-size-override.mjs
vendored
97
.github/scripts/quality/handle-size-override.mjs
vendored
|
|
@ -1,97 +0,0 @@
|
|||
/**
|
||||
* Re-triggers the PR Size Limit check when a maintainer comments `/size-limit-override`.
|
||||
*
|
||||
* Finds the latest `PR Size Limit` check run on the PR's HEAD commit and re-requests it.
|
||||
* The re-run scans comments, finds the override, and passes — satisfying branch protection
|
||||
* without any label manipulation or status API calls.
|
||||
*
|
||||
* Exit codes:
|
||||
* 0 – Check run re-requested successfully
|
||||
* 1 – Commenter lacks permission, or no check run found to re-request
|
||||
*/
|
||||
|
||||
import { initGithub, getEventFromGithubEventPath } from '../github-helpers.mjs';
|
||||
|
||||
const CHECK_NAME = 'PR Size Limit';
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* octokit: import('../github-helpers.mjs').GitHubInstance,
|
||||
* owner: string,
|
||||
* repo: string,
|
||||
* prNumber: number,
|
||||
* commenter: string,
|
||||
* commentId: number,
|
||||
* }} params
|
||||
*/
|
||||
export async function run({ octokit, owner, repo, prNumber, commenter, commentId }) {
|
||||
const { data: perm } = await octokit.rest.repos.getCollaboratorPermissionLevel({
|
||||
owner,
|
||||
repo,
|
||||
username: commenter,
|
||||
});
|
||||
|
||||
if (!['admin', 'write', 'maintain'].includes(perm.permission)) {
|
||||
console.log(
|
||||
`::error::@${commenter} does not have permission to override the PR size limit (requires write access).`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const { data: pr } = await octokit.rest.pulls.get({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: prNumber,
|
||||
});
|
||||
const headSha = pr.head.sha;
|
||||
|
||||
const {
|
||||
data: { check_runs },
|
||||
} = await octokit.rest.checks.listForRef({
|
||||
owner,
|
||||
repo,
|
||||
ref: headSha,
|
||||
check_name: CHECK_NAME,
|
||||
per_page: 1,
|
||||
});
|
||||
|
||||
if (check_runs.length === 0) {
|
||||
console.log(
|
||||
`::error::No '${CHECK_NAME}' check run found for ${headSha}. Push a new commit to trigger it.`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
await octokit.rest.checks.rerequestRun({
|
||||
owner,
|
||||
repo,
|
||||
check_run_id: check_runs[0].id,
|
||||
});
|
||||
|
||||
await octokit.rest.reactions.createForIssueComment({
|
||||
owner,
|
||||
repo,
|
||||
comment_id: commentId,
|
||||
content: '+1',
|
||||
});
|
||||
|
||||
console.log(`Re-requested '${CHECK_NAME}' check run (${check_runs[0].id}) for ${headSha}`);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const event = getEventFromGithubEventPath();
|
||||
const { octokit, owner, repo } = initGithub();
|
||||
|
||||
await run({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
prNumber: event.issue.number,
|
||||
commenter: event.sender.login,
|
||||
commentId: event.comment.id,
|
||||
});
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
await main();
|
||||
}
|
||||
32
.github/scripts/send-docker-stats.mjs
vendored
32
.github/scripts/send-docker-stats.mjs
vendored
|
|
@ -18,18 +18,6 @@ import { existsSync, readFileSync } from 'node:fs';
|
|||
|
||||
import { sendMetrics, metric } from './send-metrics.mjs';
|
||||
|
||||
/** Parse human-readable sizes (e.g. "1.5G", "500M", "12K") to MB. */
|
||||
function parseSizeToMB(val) {
|
||||
if (typeof val === 'number') return val / (1024 * 1024);
|
||||
if (typeof val !== 'string') return null;
|
||||
const match = val.match(/^([\d.]+)\s*([KMGT]?)i?B?$/i);
|
||||
if (!match) return null;
|
||||
const num = parseFloat(match[1]);
|
||||
const suffix = match[2].toUpperCase();
|
||||
const toMB = { '': 1 / (1024 * 1024), 'K': 1 / 1024, 'M': 1, 'G': 1024, 'T': 1024 * 1024 };
|
||||
return Math.round(num * (toMB[suffix] ?? 1) * 100) / 100;
|
||||
}
|
||||
|
||||
const buildManifestPath = 'compiled/build-manifest.json';
|
||||
const dockerManifestPath = 'docker-build-manifest.json';
|
||||
|
||||
|
|
@ -49,13 +37,11 @@ const dockerManifest = existsSync(dockerManifestPath)
|
|||
const metrics = [];
|
||||
|
||||
if (buildManifest) {
|
||||
const sizeMB = parseSizeToMB(buildManifest.artifactSize);
|
||||
if (sizeMB != null) {
|
||||
metrics.push(metric('artifact-size', sizeMB, 'MB', { artifact: 'compiled' }));
|
||||
if (buildManifest.artifactSize != null) {
|
||||
metrics.push(metric('artifact-size', buildManifest.artifactSize, 'bytes', { artifact: 'compiled' }));
|
||||
}
|
||||
const duration = buildManifest.buildDuration;
|
||||
if (duration?.total != null) {
|
||||
metrics.push(metric('build-duration', duration.total / 1000, 's', { artifact: 'compiled' }));
|
||||
if (buildManifest.buildDuration != null) {
|
||||
metrics.push(metric('build-duration', buildManifest.buildDuration / 1000, 's', { artifact: 'compiled' }));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -63,12 +49,12 @@ if (dockerManifest) {
|
|||
const platform = dockerManifest.platform ?? 'unknown';
|
||||
|
||||
for (const image of dockerManifest.images ?? []) {
|
||||
const imageSizeMB = parseSizeToMB(image.size ?? image.sizeBytes);
|
||||
const imageName = image.imageName ?? image.name ?? 'unknown';
|
||||
const shortName = imageName.replace(/^n8nio\//, '').replace(/:.*$/, '');
|
||||
if (imageSizeMB != null) {
|
||||
if (image.sizeBytes != null) {
|
||||
metrics.push(
|
||||
metric(`docker-image-size-${shortName}`, imageSizeMB, 'MB', { platform }),
|
||||
metric('docker-image-size', image.sizeBytes, 'bytes', {
|
||||
image: image.name ?? 'unknown',
|
||||
platform,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,72 +0,0 @@
|
|||
import { getMonorepoProjects } from './pnpm-utils.mjs';
|
||||
|
||||
const NPM_REGISTRY = 'https://registry.npmjs.org';
|
||||
|
||||
/**
|
||||
* @param {string} name
|
||||
* @param {string} version
|
||||
* @param {string} tag
|
||||
* @param {string} token
|
||||
*/
|
||||
async function setDistTag(name, version, tag, token) {
|
||||
// Scoped package names need both @ and / encoded (e.g. @n8n/foo → %40n8n%2ffoo)
|
||||
const encodedName = encodeURIComponent(name);
|
||||
const url = `${NPM_REGISTRY}/-/package/${encodedName}/dist-tags/${tag}`;
|
||||
|
||||
return fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(version),
|
||||
});
|
||||
}
|
||||
|
||||
async function setLatestForMonorepoPackages() {
|
||||
const token = process.env.NPM_TOKEN;
|
||||
if (!token) {
|
||||
throw new Error('NPM_TOKEN environment variable is required');
|
||||
}
|
||||
|
||||
const packages = await getMonorepoProjects();
|
||||
|
||||
const publishedPackages = packages //
|
||||
.filter((pkg) => !pkg.private)
|
||||
.filter((pkg) => pkg.name.startsWith('@n8n/'))
|
||||
.filter((pkg) => pkg.version);
|
||||
|
||||
const failures = [];
|
||||
|
||||
for (const pkg of publishedPackages) {
|
||||
const versionName = `${pkg.name}@${pkg.version}`;
|
||||
|
||||
try {
|
||||
const res = await setDistTag(pkg.name, pkg.version, 'latest', token);
|
||||
|
||||
if (res.ok) {
|
||||
console.log(`Set ${versionName} as latest`);
|
||||
} else {
|
||||
const body = await res.text().catch(() => '');
|
||||
console.error(`Failed to set ${versionName} as latest: HTTP ${res.status} ${body}`);
|
||||
failures.push(versionName);
|
||||
}
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
console.error(`Failed to set ${versionName} as latest: ${message}`);
|
||||
failures.push(versionName);
|
||||
}
|
||||
}
|
||||
|
||||
if (failures.length > 0) {
|
||||
throw new Error(`Failed to update dist-tags for: ${failures.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
// only run when executed directly, not when imported by tests
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
setLatestForMonorepoPackages().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
90
.github/scripts/validate-docs-links.js
vendored
Normal file
90
.github/scripts/validate-docs-links.js
vendored
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const packages = ['nodes-base', '@n8n/nodes-langchain'];
|
||||
const concurrency = 20;
|
||||
let exitCode = 0;
|
||||
|
||||
const debug = require('debug')('n8n');
|
||||
const path = require('path');
|
||||
const https = require('https');
|
||||
const glob = require('glob');
|
||||
const pLimit = require('p-limit');
|
||||
const picocolors = require('picocolors');
|
||||
const Lookup = require('cacheable-lookup').default;
|
||||
|
||||
const agent = new https.Agent({ keepAlive: true, keepAliveMsecs: 5000 });
|
||||
new Lookup().install(agent);
|
||||
const limiter = pLimit(concurrency);
|
||||
|
||||
const validateUrl = async (packageName, kind, type) =>
|
||||
new Promise((resolve, reject) => {
|
||||
const name = type.displayName;
|
||||
const documentationUrl =
|
||||
kind === 'credentials'
|
||||
? type.documentationUrl
|
||||
: type.codex?.resources?.primaryDocumentation?.[0]?.url;
|
||||
if (!documentationUrl) resolve([name, null]);
|
||||
|
||||
const url = new URL(
|
||||
/^https?:\/\//.test(documentationUrl)
|
||||
? documentationUrl
|
||||
: `https://docs.n8n.io/integrations/builtin/${kind}/${documentationUrl.toLowerCase()}/`,
|
||||
);
|
||||
https
|
||||
.request(
|
||||
{
|
||||
hostname: url.hostname,
|
||||
port: 443,
|
||||
path: url.pathname,
|
||||
method: 'HEAD',
|
||||
agent,
|
||||
},
|
||||
(res) => {
|
||||
debug(picocolors.green('✓'), packageName, kind, name);
|
||||
resolve([name, res.statusCode]);
|
||||
},
|
||||
)
|
||||
.on('error', (e) => {
|
||||
debug(picocolors.red('✘'), packageName, kind, name);
|
||||
reject(e);
|
||||
})
|
||||
.end();
|
||||
});
|
||||
|
||||
const checkLinks = async (packageName, kind) => {
|
||||
const baseDir = path.resolve(__dirname, '../../packages', packageName);
|
||||
let types = require(path.join(baseDir, `dist/types/${kind}.json`));
|
||||
if (kind === 'nodes')
|
||||
types = types.filter(
|
||||
({ codex, hidden }) => !!codex?.resources?.primaryDocumentation && !hidden,
|
||||
);
|
||||
debug(packageName, kind, types.length);
|
||||
|
||||
const statuses = await Promise.all(
|
||||
types.map((type) =>
|
||||
limiter(() => {
|
||||
return validateUrl(packageName, kind, type);
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
const missingDocs = [];
|
||||
const invalidUrls = [];
|
||||
for (const [name, statusCode] of statuses) {
|
||||
if (statusCode === null) missingDocs.push(name);
|
||||
if (statusCode !== 200) invalidUrls.push(name);
|
||||
}
|
||||
|
||||
if (missingDocs.length)
|
||||
console.log('Documentation URL missing in %s for %s', packageName, kind, missingDocs);
|
||||
if (invalidUrls.length)
|
||||
console.log('Documentation URL invalid in %s for %s', packageName, kind, invalidUrls);
|
||||
if (missingDocs.length || invalidUrls.length) exitCode = 1;
|
||||
};
|
||||
|
||||
(async () => {
|
||||
for (const packageName of packages) {
|
||||
await Promise.all([checkLinks(packageName, 'credentials'), checkLinks(packageName, 'nodes')]);
|
||||
if (exitCode !== 0) process.exit(exitCode);
|
||||
}
|
||||
})();
|
||||
1300
.github/test-metrics/playwright.json
vendored
1300
.github/test-metrics/playwright.json
vendored
File diff suppressed because it is too large
Load Diff
45
.github/test-metrics/quarantine.json
vendored
45
.github/test-metrics/quarantine.json
vendored
|
|
@ -1,45 +0,0 @@
|
|||
{
|
||||
"updatedAt": "2026-05-11T14:16:56.139Z",
|
||||
"source": "currents",
|
||||
"projectId": "LRxcNt",
|
||||
"quarantined": [
|
||||
"Canvas Actions > Node hover actions > should execute node",
|
||||
"Chat user role @capability:proxy > use chat as chat user @auth:chat",
|
||||
"Code node > Code editor > should execute the placeholder successfully in both modes",
|
||||
"Data Mapping > maps expressions to updated fields correctly @fixme",
|
||||
"Data pinning > Advanced pinning scenarios > should be able to reference paired items in node before pinned data",
|
||||
"Debug mode > should enter debug mode for failed executions",
|
||||
"HITL for Tools @capability:proxy > should add a HITL tool node and run it",
|
||||
"Inject previous execution > can map keys from previous execution",
|
||||
"Instance AI remediation guard @capability:proxy > should preserve a submitted workflow when mocked credential verification needs setup",
|
||||
"Instance AI sidebar @capability:proxy > should delete thread via action menu",
|
||||
"Instance AI workflow setup actions @capability:proxy > should apply parameter and credential edits and persist them to the workflow",
|
||||
"Instance AI workflow setup actions @capability:proxy > should partially apply completed cards when Later is clicked on the last step",
|
||||
"Loads template setup modal correctly",
|
||||
"NDV Data Display > Schema View > should not display pagination for schema",
|
||||
"Settings @capability:proxy > set global credentials for a provider",
|
||||
"Tools usage @capability:proxy > use web search tool in conversation",
|
||||
"can configure, connect, and sync secrets from LocalStack",
|
||||
"can create a connection pointing to LocalStack",
|
||||
"manage workflow agents @auth:admin",
|
||||
"maps expressions to updated fields correctly @fixme",
|
||||
"sharing workflow agent with project chat user",
|
||||
"should add switch node and test connections",
|
||||
"should allow re-running workflow after initial execution",
|
||||
"should be able to reference paired items in node before pinned data",
|
||||
"should clear required-parameter issue indicator when the field is filled",
|
||||
"should execute node",
|
||||
"should filter executions by status and show filter badge",
|
||||
"should maintain zoom functionality after switching between Editor and Workflow history and Workflow list",
|
||||
"should not send workflow context if nothing changed",
|
||||
"should open executions tab",
|
||||
"should populate logs as manual execution progresses",
|
||||
"should preserve resource mapper values when navigating between connected nodes via floating nodes",
|
||||
"should render runItems for sub-nodes and allow switching between them",
|
||||
"should reset filter and remove badge",
|
||||
"should retrieve list options when other params throw errors",
|
||||
"should save template id with the workflow",
|
||||
"should send proper payload for node rerun",
|
||||
"use web search tool in conversation"
|
||||
]
|
||||
}
|
||||
5
.github/trivy.yaml
vendored
Normal file
5
.github/trivy.yaml
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Trivy configuration for n8n security scans
|
||||
# See: https://trivy.dev/latest/docs/references/configuration/config-file/
|
||||
vulnerability:
|
||||
vex:
|
||||
- vex.openvex.json
|
||||
2
.github/workflows/build-base-image.yml
vendored
2
.github/workflows/build-base-image.yml
vendored
|
|
@ -25,7 +25,7 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node_version: ['22', '24.14.1', '25']
|
||||
node_version: ['22', '24.13.1', '25']
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
|
|
|
|||
100
.github/workflows/build-unit-test-pr-comment.yml
vendored
Normal file
100
.github/workflows/build-unit-test-pr-comment.yml
vendored
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
name: 'Build: Unit Test PR Comment'
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
pull-requests: read
|
||||
contents: read
|
||||
actions: write
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
validate_and_dispatch:
|
||||
name: Validate user and dispatch CI workflow
|
||||
if: github.event.issue.pull_request && startsWith(github.event.comment.body, '/build-unit-test')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Validate user permissions and collect PR data
|
||||
id: check_permissions
|
||||
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const commenter = context.actor;
|
||||
const body = (context.payload.comment.body || '').trim();
|
||||
const isCommand = body.startsWith('/build-unit-test');
|
||||
const allowedPermissions = ['admin', 'write', 'maintain'];
|
||||
const commentId = context.payload.comment.id;
|
||||
const { owner, repo } = context.repo;
|
||||
|
||||
async function react(content) {
|
||||
try {
|
||||
await github.rest.reactions.createForIssueComment({
|
||||
owner,
|
||||
repo,
|
||||
comment_id: commentId,
|
||||
content,
|
||||
});
|
||||
} catch (error) {
|
||||
console.log(`Failed to add reaction '${content}': ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
core.setOutput('proceed', 'false');
|
||||
core.setOutput('headSha', '');
|
||||
core.setOutput('prNumber', '');
|
||||
|
||||
if (!context.payload.issue.pull_request || !isCommand) {
|
||||
console.log('Comment is not /build-unit-test on a pull request. Skipping.');
|
||||
return;
|
||||
}
|
||||
|
||||
let permission;
|
||||
try {
|
||||
const { data } = await github.rest.repos.getCollaboratorPermissionLevel({
|
||||
owner,
|
||||
repo,
|
||||
username: commenter,
|
||||
});
|
||||
permission = data.permission;
|
||||
} catch (error) {
|
||||
console.log(`Could not verify permissions for @${commenter}: ${error.message}`);
|
||||
await react('confused');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!allowedPermissions.includes(permission)) {
|
||||
console.log(`User @${commenter} has '${permission}' permission; requires admin/write/maintain.`);
|
||||
await react('-1');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const prNumber = context.issue.number;
|
||||
const { data: pr } = await github.rest.pulls.get({
|
||||
owner,
|
||||
repo,
|
||||
pull_number: prNumber,
|
||||
});
|
||||
await react('+1');
|
||||
core.setOutput('proceed', 'true');
|
||||
core.setOutput('headSha', pr.head.sha);
|
||||
core.setOutput('prNumber', String(prNumber));
|
||||
} catch (error) {
|
||||
console.log(`Failed to fetch PR details for PR #${context.issue.number}: ${error.message}`);
|
||||
await react('confused');
|
||||
}
|
||||
|
||||
- name: Dispatch build/unit test workflow
|
||||
if: ${{ steps.check_permissions.outputs.proceed == 'true' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
HEAD_SHA: ${{ steps.check_permissions.outputs.headSha }}
|
||||
PR_NUMBER: ${{ steps.check_permissions.outputs.prNumber }}
|
||||
run: |
|
||||
gh workflow run ci-manual-unit-tests.yml \
|
||||
--repo "${{ github.repository }}" \
|
||||
-f ref="${HEAD_SHA}" \
|
||||
-f pr_number="${PR_NUMBER}"
|
||||
111
.github/workflows/ci-check-eligibility-reusable.yml
vendored
Normal file
111
.github/workflows/ci-check-eligibility-reusable.yml
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
# Determines if conditions are met for running subsequent jobs on a Pull Request.
|
||||
#
|
||||
# !! IMPORTANT !!
|
||||
# This workflow RELIES on being called from a parent workflow triggered by
|
||||
# a `pull_request` or `pull_request_target` event. It uses `github.event`
|
||||
# to access PR details.
|
||||
#
|
||||
# It checks if all the following conditions are TRUE:
|
||||
# 1. The PR is NOT from a fork (i.e., it's an internal PR).
|
||||
# 2. The PR has been approved by a maintainer (`is_pr_approved_by_maintainer`).
|
||||
# 3. The PR's source branch does NOT match an excluded pattern.
|
||||
# 4. The PR includes relevant file changes (`paths_filter_patterns`).
|
||||
#
|
||||
# It outputs `should_run` as 'true' if ALL conditions pass, 'false' otherwise.
|
||||
|
||||
name: 'CI: Check Eligibility'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
is_pr_approved_by_maintainer:
|
||||
required: true
|
||||
type: boolean
|
||||
paths_filter_patterns:
|
||||
description: "Path filter patterns for 'paths-filter-action'."
|
||||
required: false
|
||||
type: string
|
||||
default: |
|
||||
not_ignored:
|
||||
- '!.devcontainer/**'
|
||||
- '!.github/*'
|
||||
- '!.github/scripts/*'
|
||||
- '!.github/workflows/benchmark-*'
|
||||
- '!.github/workflows/check-*'
|
||||
- '!.vscode/**'
|
||||
- '!docker/**'
|
||||
- '!packages/@n8n/benchmark/**'
|
||||
- '!packages/@n8n/task-runner-python/**'
|
||||
- '!**/*.md'
|
||||
excluded_source_branch_patterns:
|
||||
description: 'Newline-separated list of glob patterns for source branches to EXCLUDE.'
|
||||
required: false
|
||||
type: string
|
||||
default: |
|
||||
release/*
|
||||
master
|
||||
|
||||
outputs:
|
||||
should_run:
|
||||
description: "Outputs 'true' if all eligibility checks pass, otherwise 'false'."
|
||||
value: ${{ jobs.evaluate_conditions.outputs.run_decision }}
|
||||
|
||||
jobs:
|
||||
evaluate_conditions:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
run_decision: ${{ steps.evaluate.outputs.should_run }}
|
||||
steps:
|
||||
- name: Check out current commit
|
||||
uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
|
||||
- name: Determine changed files
|
||||
uses: tomi/paths-filter-action@32c62f5ca100c1110406e3477d5b3ecef4666fec # v3.0.2
|
||||
id: changed
|
||||
with:
|
||||
filters: ${{ inputs.paths_filter_patterns }}
|
||||
predicate-quantifier: 'every'
|
||||
|
||||
- name: Evaluate Conditions & Set Output
|
||||
id: evaluate
|
||||
env:
|
||||
IS_FORK: ${{ github.event.pull_request.head.repo.fork }}
|
||||
IS_APPROVED: ${{ inputs.is_pr_approved_by_maintainer }}
|
||||
FILES_CHANGED: ${{ steps.changed.outputs.not_ignored == 'true' }}
|
||||
HEAD_REF: ${{ github.event.pull_request.head.ref }}
|
||||
EXCLUDED_PATTERNS: ${{ inputs.excluded_source_branch_patterns }}
|
||||
run: |
|
||||
if [[ "$IS_FORK" == "true" ]]; then
|
||||
is_community="true"
|
||||
else
|
||||
is_community="false"
|
||||
fi
|
||||
|
||||
source_branch_excluded="false"
|
||||
while IFS= read -r pattern; do
|
||||
# shellcheck disable=SC2053
|
||||
if [[ -n "$pattern" && "$HEAD_REF" == $pattern ]]; then
|
||||
source_branch_excluded="true"
|
||||
break
|
||||
fi
|
||||
done <<< "$EXCLUDED_PATTERNS"
|
||||
|
||||
echo "--- Checking Conditions ---"
|
||||
echo "Is NOT Community PR: $([[ "$is_community" == "false" ]] && echo true || echo false)"
|
||||
echo "Files Changed: $FILES_CHANGED"
|
||||
echo "Source Branch Excluded: $source_branch_excluded"
|
||||
echo "Is Approved: $IS_APPROVED"
|
||||
echo "-------------------------"
|
||||
|
||||
if [[ "$is_community" == "false" && \
|
||||
"$FILES_CHANGED" == "true" && \
|
||||
"$source_branch_excluded" == "false" && \
|
||||
"$IS_APPROVED" == "true" ]]; then
|
||||
echo "Decision: Conditions met. Setting should_run=true."
|
||||
echo "should_run=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "Decision: Conditions not met. Setting should_run=false."
|
||||
echo "should_run=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
184
.github/workflows/ci-cla-check.yml
vendored
184
.github/workflows/ci-cla-check.yml
vendored
|
|
@ -1,184 +0,0 @@
|
|||
name: 'CI: CLA Check'
|
||||
|
||||
# In-house replacement for the GitHub App "CLA Bot".
|
||||
#
|
||||
# Triggers
|
||||
# - pull_request_target (opened/synchronize/reopened): re-checks signatures
|
||||
# whenever a PR is opened or new commits are pushed.
|
||||
# - issue_comment (`/cla-check` on a PR): manual re-check after a contributor
|
||||
# signs the CLA, without needing a push.
|
||||
# - merge_group: re-checks at merge-queue time so a ruleset can hard-block
|
||||
# unsigned merges even if the PR check went stale.
|
||||
#
|
||||
# Output
|
||||
# - A commit status named "CLA Check" on the head SHA. Add this name to a
|
||||
# ruleset's required-checks list to gate merges on it.
|
||||
# - A single, edited-in-place PR comment listing unsigned contributors.
|
||||
#
|
||||
# Implementation
|
||||
# The heavy lifting lives in .github/scripts/cla/*.mjs. Each step below
|
||||
# loads its corresponding module and invokes its default export.
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
pr_number:
|
||||
description: 'Pull request number to re-verify'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
statuses: write
|
||||
|
||||
concurrency:
|
||||
group: cla-check-${{ github.event.pull_request.number || github.event.issue.number || github.event.merge_group.head_sha || github.event.inputs.pr_number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
STATUS_CONTEXT: 'CLA Check'
|
||||
CLA_API: 'https://cla-bot-prod.users.n8n.cloud/webhook/cla/check'
|
||||
CLA_SIGN_URL: 'https://cla-bot-prod.users.n8n.cloud/webhook/cla'
|
||||
COMMENT_MARKER: '<!-- n8n-cla-check -->'
|
||||
|
||||
jobs:
|
||||
cla-check:
|
||||
name: Verify CLA signatures
|
||||
# Skip issue_comment unless it's on a PR and the body starts with /cla-check.
|
||||
if: >-
|
||||
github.event_name != 'issue_comment' ||
|
||||
(github.event.issue.pull_request != null &&
|
||||
startsWith(github.event.comment.body, '/cla-check'))
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Generate GitHub App Token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
|
||||
with:
|
||||
app-id: ${{ secrets.N8N_ASSISTANT_APP_ID }}
|
||||
private-key: ${{ secrets.N8N_ASSISTANT_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout CLA scripts
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
sparse-checkout: .github/scripts/cla
|
||||
sparse-checkout-cone-mode: false
|
||||
|
||||
- name: Resolve PR context
|
||||
id: context
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const mod = await import('${{ github.workspace }}/.github/scripts/cla/resolve-context.mjs');
|
||||
await mod.default({ github, context, core });
|
||||
|
||||
- name: Post pending commit status
|
||||
if: steps.context.outputs.head_sha != ''
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
HEAD_SHA: ${{ steps.context.outputs.head_sha }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
await github.rest.repos.createCommitStatus({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
sha: process.env.HEAD_SHA,
|
||||
state: 'pending',
|
||||
context: process.env.STATUS_CONTEXT,
|
||||
description: 'Verifying CLA signatures…',
|
||||
});
|
||||
|
||||
- name: Check CLA signatures
|
||||
id: check
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.context.outputs.pr_number }}
|
||||
HEAD_SHA: ${{ steps.context.outputs.head_sha }}
|
||||
BASE_SHA: ${{ steps.context.outputs.base_sha }}
|
||||
IS_MERGE_GROUP: ${{ steps.context.outputs.is_merge_group }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const mod = await import('${{ github.workspace }}/.github/scripts/cla/check-signatures.mjs');
|
||||
await mod.default({ github, context, core });
|
||||
|
||||
- name: Post final commit status
|
||||
if: always() && steps.context.outputs.head_sha != ''
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
HEAD_SHA: ${{ steps.context.outputs.head_sha }}
|
||||
PR_NUMBER: ${{ steps.context.outputs.pr_number }}
|
||||
ALL_SIGNED: ${{ steps.check.outputs.all_signed }}
|
||||
UNSIGNED: ${{ steps.check.outputs.unsigned }}
|
||||
ERRORED: ${{ steps.check.outputs.errored }}
|
||||
UNLINKED: ${{ steps.check.outputs.unlinked }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const mod = await import('${{ github.workspace }}/.github/scripts/cla/post-final-status.mjs');
|
||||
await mod.default({ github, context, core });
|
||||
|
||||
- name: Update PR comment
|
||||
# Don't comment from merge_group (no PR context) or when the check
|
||||
# failed to produce a result.
|
||||
if: >-
|
||||
always() &&
|
||||
steps.context.outputs.pr_number != '' &&
|
||||
steps.check.outputs.all_signed != ''
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.context.outputs.pr_number }}
|
||||
ALL_SIGNED: ${{ steps.check.outputs.all_signed }}
|
||||
UNSIGNED: ${{ steps.check.outputs.unsigned }}
|
||||
ERRORED: ${{ steps.check.outputs.errored }}
|
||||
UNLINKED: ${{ steps.check.outputs.unlinked }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const mod = await import('${{ github.workspace }}/.github/scripts/cla/update-pr-comment.mjs');
|
||||
await mod.default({ github, context, core });
|
||||
|
||||
- name: Manage cla-signed label
|
||||
# Skip on merge_group (no PR) and when the check produced no result.
|
||||
if: >-
|
||||
always() &&
|
||||
steps.context.outputs.pr_number != '' &&
|
||||
steps.check.outputs.all_signed != ''
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.context.outputs.pr_number }}
|
||||
ALL_SIGNED: ${{ steps.check.outputs.all_signed }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
const mod = await import('${{ github.workspace }}/.github/scripts/cla/manage-label.mjs');
|
||||
await mod.default({ github, context, core });
|
||||
|
||||
- name: React to /cla-check comment
|
||||
if: always() && github.event_name == 'issue_comment' && steps.check.outputs.all_signed != ''
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
env:
|
||||
ALL_SIGNED: ${{ steps.check.outputs.all_signed }}
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
try {
|
||||
await github.rest.reactions.createForIssueComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: context.payload.comment.id,
|
||||
content: process.env.ALL_SIGNED === 'true' ? '+1' : '-1',
|
||||
});
|
||||
} catch (e) {
|
||||
core.info(`Could not react to comment: ${e.message}`);
|
||||
}
|
||||
23
.github/workflows/ci-codeowners-validation.yml
vendored
23
.github/workflows/ci-codeowners-validation.yml
vendored
|
|
@ -1,23 +0,0 @@
|
|||
# .github/workflows/ci-codeowners-validation.yml
|
||||
name: "CI: Validate CODEOWNERS"
|
||||
|
||||
# Only run when CODEOWNERS or packages change
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- ".github/CODEOWNERS"
|
||||
- "packages/**"
|
||||
|
||||
jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f #v0.7.4
|
||||
with:
|
||||
# Start with safe checks only. Add "owners" and
|
||||
# experimental_checks: "notowned" once the file has settled
|
||||
# and skip patterns are configured.
|
||||
checks: "files,duppatterns,syntax"
|
||||
github_access_token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
47
.github/workflows/ci-detect-new-packages.yml
vendored
47
.github/workflows/ci-detect-new-packages.yml
vendored
|
|
@ -1,47 +0,0 @@
|
|||
name: 'CI: Detect New Packages on Master'
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- closed
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
detect-new-packages:
|
||||
name: Check for new unpublished packages
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Check for new unpublished packages
|
||||
id: detect
|
||||
continue-on-error: true
|
||||
run: node .github/scripts/detect-new-packages.mjs
|
||||
|
||||
- name: Notify Slack about new packages
|
||||
if: steps.detect.outcome == 'failure' && steps.detect.outputs.packages != ''
|
||||
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
|
||||
with:
|
||||
method: chat.postMessage
|
||||
token: ${{ secrets.RELEASE_HELPER_SLACK_TOKEN }}
|
||||
payload: |
|
||||
channel: C036AELNMV0
|
||||
text: |-
|
||||
:warning: *New unpublished packages detected* after merging <${{ github.event.pull_request.html_url }}|PR #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}>
|
||||
|
||||
The following packages do not exist on npm yet: `${{ steps.detect.outputs.packages }}`
|
||||
|
||||
*If a package is not intended for npm*, set `"private": true` in its `package.json` to exclude it from future checks.
|
||||
|
||||
*Otherwise, to unblock the next release:*
|
||||
1. Run the <${{ github.server_url }}/${{ github.repository }}/actions/workflows/release-publish-new-package.yml|Release: Publish New Package> workflow for each package
|
||||
2. Configure Trusted Publishing on npmjs.com (owner: `n8n-io`, repo: `n8n`, workflow: `release-publish.yml`)
|
||||
132
.github/workflows/ci-manual-unit-tests.yml
vendored
Normal file
132
.github/workflows/ci-manual-unit-tests.yml
vendored
Normal file
|
|
@ -0,0 +1,132 @@
|
|||
name: 'CI: Manual Unit Tests'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
ref:
|
||||
description: Commit SHA or ref to check out
|
||||
required: true
|
||||
pr_number:
|
||||
description: PR number (optional, for check reporting)
|
||||
required: false
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
checks: write
|
||||
|
||||
jobs:
|
||||
create-check-run:
|
||||
name: Create Check Run
|
||||
runs-on: ubuntu-latest
|
||||
if: inputs.pr_number != ''
|
||||
outputs:
|
||||
check_run_id: ${{ steps.create.outputs.check_run_id }}
|
||||
steps:
|
||||
- name: Create pending check run on PR
|
||||
id: create
|
||||
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const { data: checkRun } = await github.rest.checks.create({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'Build & Unit Tests - Checks',
|
||||
head_sha: '${{ inputs.ref }}',
|
||||
status: 'in_progress',
|
||||
output: {
|
||||
title: 'Build & Unit Tests - Checks',
|
||||
summary: 'Running build, unit tests, and lint...'
|
||||
}
|
||||
});
|
||||
|
||||
core.setOutput('check_run_id', checkRun.id);
|
||||
console.log(`Created check run ${checkRun.id} on commit ${{ inputs.ref }}`);
|
||||
|
||||
install-and-build:
|
||||
name: Install & Build
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2204
|
||||
env:
|
||||
NODE_OPTIONS: '--max-old-space-size=6144'
|
||||
steps:
|
||||
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
|
||||
- name: Setup and Build
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
|
||||
- name: Run format check
|
||||
run: pnpm format:check
|
||||
|
||||
- name: Run typecheck
|
||||
run: pnpm typecheck
|
||||
|
||||
unit-tests:
|
||||
name: Unit tests
|
||||
needs: install-and-build
|
||||
uses: ./.github/workflows/test-unit-reusable.yml
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
collectCoverage: true
|
||||
secrets:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
needs: install-and-build
|
||||
uses: ./.github/workflows/test-linting-reusable.yml
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
|
||||
post-build-unit-tests:
|
||||
name: Build & Unit Tests - Checks
|
||||
runs-on: ubuntu-latest
|
||||
needs: [create-check-run, install-and-build, unit-tests, lint]
|
||||
if: always()
|
||||
steps:
|
||||
- name: Update check run on PR (if triggered from PR comment)
|
||||
if: inputs.pr_number != ''
|
||||
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const checkRunId = '${{ needs.create-check-run.outputs.check_run_id }}';
|
||||
|
||||
if (!checkRunId) {
|
||||
console.log('No check run ID found, skipping update');
|
||||
return;
|
||||
}
|
||||
|
||||
const buildResult = '${{ needs.install-and-build.result }}';
|
||||
const testResult = '${{ needs.unit-tests.result }}';
|
||||
const lintResult = '${{ needs.lint.result }}';
|
||||
|
||||
const conclusion = (buildResult === 'success' && testResult === 'success' && lintResult === 'success')
|
||||
? 'success'
|
||||
: 'failure';
|
||||
|
||||
const summary = `
|
||||
**Build**: ${buildResult}
|
||||
**Unit Tests**: ${testResult}
|
||||
**Lint**: ${lintResult}
|
||||
`;
|
||||
|
||||
await github.rest.checks.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
check_run_id: parseInt(checkRunId),
|
||||
status: 'completed',
|
||||
conclusion: conclusion,
|
||||
output: {
|
||||
title: 'Build & Unit Tests - Checks',
|
||||
summary: summary
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`Updated check run ${checkRunId} with conclusion: ${conclusion}`);
|
||||
|
||||
- name: Fail if any job failed
|
||||
if: needs.install-and-build.result == 'failure' || needs.unit-tests.result == 'failure' || needs.lint.result == 'failure'
|
||||
run: exit 1
|
||||
4
.github/workflows/ci-master.yml
vendored
4
.github/workflows/ci-master.yml
vendored
|
|
@ -28,11 +28,11 @@ jobs:
|
|||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
node-version: [22.x, 24.14.1, 25.x]
|
||||
node-version: [22.x, 24.13.1, 25.x]
|
||||
with:
|
||||
ref: ${{ github.sha }}
|
||||
nodeVersion: ${{ matrix.node-version }}
|
||||
collectCoverage: ${{ matrix.node-version == '24.14.1' }}
|
||||
collectCoverage: ${{ matrix.node-version == '24.13.1' }}
|
||||
secrets: inherit
|
||||
|
||||
lint:
|
||||
|
|
|
|||
181
.github/workflows/ci-pr-quality.yml
vendored
181
.github/workflows/ci-pr-quality.yml
vendored
|
|
@ -1,181 +0,0 @@
|
|||
name: 'CI: PR Quality Checks'
|
||||
|
||||
on:
|
||||
merge_group:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- edited
|
||||
- synchronize
|
||||
branches:
|
||||
- master
|
||||
issue_comment:
|
||||
types:
|
||||
- created
|
||||
|
||||
jobs:
|
||||
handle-size-override:
|
||||
name: Handle /size-limit-override
|
||||
# Re-requests the PR Size Limit check run on the PR's HEAD commit, so it re-runs
|
||||
# in the original PR context and picks up the override comment.
|
||||
if: |
|
||||
github.event_name == 'issue_comment' &&
|
||||
github.event.issue.pull_request &&
|
||||
startsWith(github.event.comment.body, '/size-limit-override')
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
checks: write
|
||||
issues: write
|
||||
pull-requests: read
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Re-request PR Size Limit check
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: node .github/scripts/quality/handle-size-override.mjs
|
||||
|
||||
check-ownership-checkbox:
|
||||
name: Ownership Acknowledgement
|
||||
# Checks that the author has acknowledged the ownership of their code
|
||||
# by checking the checkbox in the PR summary.
|
||||
# Skipped for bot-authored PRs (Dependabot, Renovate, github-actions, Aikido, etc.).
|
||||
# The required aggregator `required-pr-quality-checks` treats skipped as success.
|
||||
if: |
|
||||
github.event_name == 'pull_request' &&
|
||||
github.event.pull_request.head.repo.full_name == github.repository &&
|
||||
!contains(github.event.pull_request.labels.*.name, 'automation:backport') &&
|
||||
!contains(github.event.pull_request.title, '(backport to') &&
|
||||
github.event.pull_request.user.type != 'Bot'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Check ownership checkbox
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: node .github/scripts/quality/check-ownership-checkbox.mjs
|
||||
|
||||
check-pr-size:
|
||||
name: PR Size Limit
|
||||
# Checks that the PR size doesn't exceed the limit (currently 1000 lines)
|
||||
# Allows for override via '/size-limit-override' comment.
|
||||
# Skipped for bot-authored PRs — dep bumps from Dependabot/Renovate/Aikido
|
||||
# routinely exceed the size limit and shouldn't be gated on it.
|
||||
if: |
|
||||
github.event_name == 'pull_request' &&
|
||||
github.event.pull_request.head.repo.full_name == github.repository &&
|
||||
!contains(github.event.pull_request.labels.*.name, 'automation:backport') &&
|
||||
!contains(github.event.pull_request.title, '(backport to') &&
|
||||
github.event.pull_request.user.type != 'Bot'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Check PR size
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: node .github/scripts/quality/check-pr-size.mjs
|
||||
|
||||
changes:
|
||||
name: Detect Changes
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'merge_group'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
janitor: ${{ fromJSON(steps.filter.outputs.results).janitor == true }}
|
||||
code-health: ${{ fromJSON(steps.filter.outputs.results)['code-health'] == true }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Detect changed paths
|
||||
id: filter
|
||||
uses: ./.github/actions/ci-filter
|
||||
with:
|
||||
mode: filter
|
||||
filters: |
|
||||
janitor:
|
||||
packages/testing/playwright/**
|
||||
packages/testing/janitor/**
|
||||
code-health:
|
||||
**/package.json
|
||||
pnpm-workspace.yaml
|
||||
.code-health-baseline.json
|
||||
packages/testing/code-health/**
|
||||
|
||||
check-static-analysis:
|
||||
name: Static Analysis
|
||||
needs: changes
|
||||
if: |
|
||||
github.event_name == 'merge_group' ||
|
||||
needs.changes.outputs.code-health == 'true' ||
|
||||
needs.changes.outputs.janitor == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: pnpm turbo run build --filter=@n8n/code-health --filter=@n8n/playwright-janitor
|
||||
|
||||
- name: Run code-health
|
||||
if: github.event_name == 'merge_group' || needs.changes.outputs.code-health == 'true'
|
||||
run: pnpm --filter=@n8n/code-health check
|
||||
|
||||
- name: Run janitor
|
||||
if: ${{ !cancelled() && (github.event_name == 'merge_group' || needs.changes.outputs.janitor == 'true') }}
|
||||
run: pnpm --filter=n8n-playwright janitor
|
||||
|
||||
required-pr-quality-checks:
|
||||
name: Required PR Quality Checks
|
||||
needs: [check-ownership-checkbox, check-pr-size, check-static-analysis]
|
||||
if: always()
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
sparse-checkout: .github/actions/ci-filter
|
||||
sparse-checkout-cone-mode: false
|
||||
- name: Validate required checks
|
||||
uses: ./.github/actions/ci-filter
|
||||
with:
|
||||
mode: validate
|
||||
job-results: ${{ toJSON(needs) }}
|
||||
|
||||
71
.github/workflows/ci-pull-request-review.yml
vendored
71
.github/workflows/ci-pull-request-review.yml
vendored
|
|
@ -1,71 +0,0 @@
|
|||
name: 'CI: Pull Request Review'
|
||||
|
||||
on:
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
concurrency:
|
||||
group: ci-review-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
filter:
|
||||
name: Check Changes
|
||||
if: >-
|
||||
github.event.review.state == 'approved' &&
|
||||
github.repository == 'n8n-io/n8n'
|
||||
runs-on: ubuntu-slim
|
||||
outputs:
|
||||
design_system: ${{ fromJSON(steps.ci-filter.outputs.results)['design-system'] == true }}
|
||||
commit_sha: ${{ steps.commit-sha.outputs.sha }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: refs/pull/${{ github.event.pull_request.number }}/merge
|
||||
|
||||
- name: Capture commit SHA
|
||||
id: commit-sha
|
||||
run: echo "sha=$(git rev-parse HEAD)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Check for relevant changes
|
||||
uses: ./.github/actions/ci-filter
|
||||
id: ci-filter
|
||||
with:
|
||||
mode: filter
|
||||
filters: |
|
||||
design-system:
|
||||
packages/frontend/@n8n/design-system/**
|
||||
packages/frontend/@n8n/storybook/**
|
||||
.github/workflows/test-visual-chromatic.yml
|
||||
|
||||
chromatic:
|
||||
name: Chromatic
|
||||
needs: filter
|
||||
# Skip on fork PRs — they don't have access to the Chromatic secret.
|
||||
# This job is intentionally not in `required-review-checks` needs, so it
|
||||
# is non-blocking and won't gate merging.
|
||||
if: >-
|
||||
needs.filter.outputs.design_system == 'true' &&
|
||||
github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: ./.github/workflows/test-visual-chromatic.yml
|
||||
with:
|
||||
ref: ${{ needs.filter.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
# Required by GitHub branch protection rules.
|
||||
# PRs cannot be merged unless this job passes.
|
||||
required-review-checks:
|
||||
name: Required Review Checks
|
||||
needs: [filter]
|
||||
if: always()
|
||||
runs-on: ubuntu-slim
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
sparse-checkout: .github/actions/ci-filter
|
||||
sparse-checkout-cone-mode: false
|
||||
- name: Validate required checks
|
||||
uses: ./.github/actions/ci-filter
|
||||
with:
|
||||
mode: validate
|
||||
job-results: ${{ toJSON(needs) }}
|
||||
213
.github/workflows/ci-pull-requests.yml
vendored
213
.github/workflows/ci-pull-requests.yml
vendored
|
|
@ -22,17 +22,11 @@ jobs:
|
|||
ci: ${{ fromJSON(steps.ci-filter.outputs.results).ci == true }}
|
||||
unit: ${{ fromJSON(steps.ci-filter.outputs.results).unit == true }}
|
||||
e2e: ${{ fromJSON(steps.ci-filter.outputs.results).e2e == true }}
|
||||
dev_server_smoke: ${{ fromJSON(steps.ci-filter.outputs.results)['dev-server-smoke'] == true }}
|
||||
workflows: ${{ fromJSON(steps.ci-filter.outputs.results).workflows == true }}
|
||||
workflow_scripts: ${{ fromJSON(steps.ci-filter.outputs.results)['workflow-scripts'] == true }}
|
||||
db: ${{ fromJSON(steps.ci-filter.outputs.results).db == true }}
|
||||
performance: ${{ fromJSON(steps.ci-filter.outputs.results).performance == true }}
|
||||
e2e_performance: ${{ fromJSON(steps.ci-filter.outputs.results)['e2e-performance'] == true }}
|
||||
instance_ai_workflow_eval: ${{ fromJSON(steps.ci-filter.outputs.results)['instance-ai-workflow-eval'] == true }}
|
||||
commit_sha: ${{ steps.commit-sha.outputs.sha }}
|
||||
merge_base: ${{ steps.ci-filter.outputs.merge-base }}
|
||||
matrix: ${{ steps.generate-matrix.outputs.matrix }}
|
||||
skip_tests: ${{ steps.generate-matrix.outputs.skip-tests }}
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
|
|
@ -60,39 +54,16 @@ jobs:
|
|||
!.github/**
|
||||
e2e:
|
||||
.github/workflows/test-e2e-*.yml
|
||||
.github/workflows/prepare-docker-reusable.yml
|
||||
.github/actions/build-n8n-docker/**
|
||||
.github/actions/load-n8n-docker/**
|
||||
.github/scripts/cleanup-ghcr-images.mjs
|
||||
packages/testing/playwright/**
|
||||
packages/testing/containers/**
|
||||
dev-server-smoke:
|
||||
packages/frontend/editor-ui/vite.config.mts
|
||||
pnpm-workspace.yaml
|
||||
packages/@n8n/*/package.json
|
||||
packages/testing/playwright/tests/dev-server-smoke/**
|
||||
packages/testing/playwright/playwright.config.ts
|
||||
packages/testing/playwright/playwright-projects.ts
|
||||
packages/testing/playwright/package.json
|
||||
.github/workflows/test-dev-server-smoke-reusable.yml
|
||||
workflows: .github/**
|
||||
workflow-scripts: .github/scripts/**
|
||||
performance:
|
||||
packages/testing/performance/**
|
||||
packages/workflow/src/**
|
||||
packages/@n8n/expression-runtime/src/**
|
||||
.github/workflows/test-bench-reusable.yml
|
||||
e2e-performance:
|
||||
packages/testing/playwright/tests/performance/**
|
||||
packages/testing/playwright/utils/performance-helper.ts
|
||||
packages/testing/containers/**
|
||||
.github/workflows/test-e2e-performance-reusable.yml
|
||||
instance-ai-workflow-eval:
|
||||
packages/@n8n/instance-ai/src/**
|
||||
packages/@n8n/instance-ai/evaluations/**
|
||||
packages/cli/src/modules/instance-ai/**
|
||||
packages/core/src/execution-engine/eval-mock-helpers.ts
|
||||
.github/workflows/test-evals-instance-ai*.yml
|
||||
.github/workflows/test-evals-discovery.yml
|
||||
design-system:
|
||||
packages/frontend/@n8n/design-system/**
|
||||
packages/frontend/@n8n/chat/**
|
||||
packages/frontend/@n8n/storybook/**
|
||||
.github/workflows/test-visual-chromatic.yml
|
||||
db:
|
||||
packages/cli/src/databases/**
|
||||
packages/cli/src/modules/*/database/**
|
||||
|
|
@ -107,27 +78,13 @@ jobs:
|
|||
.github/workflows/test-db-reusable.yml
|
||||
|
||||
- name: Setup and Build
|
||||
if: fromJSON(steps.ci-filter.outputs.results).ci || fromJSON(steps.ci-filter.outputs.results).e2e
|
||||
if: fromJSON(steps.ci-filter.outputs.results).ci
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ${{ fromJSON(steps.ci-filter.outputs.results).ci && 'pnpm build' || 'pnpm turbo run build --filter=@n8n/playwright-janitor' }}
|
||||
|
||||
- name: Run format check
|
||||
if: fromJSON(steps.ci-filter.outputs.results).ci
|
||||
run: pnpm format:check
|
||||
|
||||
- name: Generate shard matrix
|
||||
id: generate-matrix
|
||||
if: fromJSON(steps.ci-filter.outputs.results).ci || fromJSON(steps.ci-filter.outputs.results).e2e
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.ci-filter.outputs.changed-files }}
|
||||
MERGE_BASE: ${{ steps.ci-filter.outputs.merge-base }}
|
||||
run: |
|
||||
FILES_CSV=$(echo "$CHANGED_FILES" | tr '\n' ',' | sed 's/,$//')
|
||||
MATRIX=$(node packages/testing/playwright/scripts/distribute-tests.mjs --matrix 16 --orchestrate --impact "--files=$FILES_CSV" "--base=$MERGE_BASE")
|
||||
echo "matrix=$MATRIX" >> "$GITHUB_OUTPUT"
|
||||
echo "skip-tests=$(node -e "process.stdout.write(JSON.parse(process.argv[1])[0]?.skip === true ? 'true' : 'false')" "$MATRIX")" >> "$GITHUB_OUTPUT"
|
||||
|
||||
unit-test:
|
||||
name: Unit tests
|
||||
if: needs.install-and-build.outputs.unit == 'true'
|
||||
|
|
@ -161,93 +118,15 @@ jobs:
|
|||
with:
|
||||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
|
||||
check-packaging:
|
||||
name: Check packaging
|
||||
if: needs.install-and-build.outputs.ci == 'true'
|
||||
runs-on: ${{ vars.RUNNER_PROVIDER == 'github' && 'ubuntu-latest' || 'blacksmith-4vcpu-ubuntu-2204' }}
|
||||
e2e-tests:
|
||||
name: E2E Tests
|
||||
needs: install-and-build
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
|
||||
- name: Check packaging
|
||||
shell: bash
|
||||
run: |
|
||||
pnpm -r pack --dry-run
|
||||
|
||||
# Seeds the SHA-keyed Docker image cache once so that downstream e2e jobs
|
||||
# (each of which invokes prepare-docker internally) short-circuit to a
|
||||
# cache hit instead of racing to rebuild.
|
||||
prepare-docker:
|
||||
name: Prepare Docker
|
||||
needs: install-and-build
|
||||
if: >-
|
||||
github.repository == 'n8n-io/n8n' &&
|
||||
github.event_name != 'merge_group' &&
|
||||
(needs.install-and-build.outputs.ci == 'true'
|
||||
|| needs.install-and-build.outputs.e2e == 'true'
|
||||
|| needs.install-and-build.outputs.e2e_performance == 'true')
|
||||
uses: ./.github/workflows/prepare-docker-reusable.yml
|
||||
if: (needs.install-and-build.outputs.ci == 'true' || needs.install-and-build.outputs.e2e == 'true') && github.repository == 'n8n-io/n8n'
|
||||
uses: ./.github/workflows/test-e2e-ci-reusable.yml
|
||||
with:
|
||||
branch: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
# Internal-only 1-spec fail-fast sanity check on sqlite.
|
||||
sqlite-sanity:
|
||||
name: 'SQLite: Sanity Check'
|
||||
needs: [install-and-build, prepare-docker]
|
||||
if: >-
|
||||
needs.prepare-docker.result == 'success' &&
|
||||
(needs.install-and-build.outputs.ci == 'true' || needs.install-and-build.outputs.e2e == 'true') &&
|
||||
github.repository == 'n8n-io/n8n' &&
|
||||
github.event_name != 'merge_group' &&
|
||||
github.event.pull_request.head.repo.fork != true
|
||||
uses: ./.github/workflows/test-e2e-reusable.yml
|
||||
with:
|
||||
branch: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
test-mode: docker-artifact
|
||||
test-command: pnpm --filter=n8n-playwright test:container:sqlite:e2e tests/e2e/building-blocks/workflow-entry-points.spec.ts
|
||||
workers: '1'
|
||||
artifact-prefix: sanity
|
||||
secrets: inherit
|
||||
|
||||
# Full e2e run. Internal PRs run multi-main (postgres + redis + caddy + 2 mains + 1 worker).
|
||||
# Fork PRs run sqlite-only and skip @licensed tests (no enterprise license secrets on forks).
|
||||
e2e:
|
||||
name: E2E
|
||||
needs: [install-and-build, prepare-docker]
|
||||
if: >-
|
||||
needs.prepare-docker.result == 'success' &&
|
||||
(needs.install-and-build.outputs.ci == 'true' || needs.install-and-build.outputs.e2e == 'true') &&
|
||||
needs.install-and-build.outputs.skip_tests != 'true' &&
|
||||
github.event_name != 'merge_group'
|
||||
uses: ./.github/workflows/test-e2e-reusable.yml
|
||||
with:
|
||||
branch: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
test-mode: docker-artifact
|
||||
test-command: ${{ github.event.pull_request.head.repo.fork == true && 'pnpm --filter=n8n-playwright test:container:sqlite:e2e --grep-invert=@licensed' || 'pnpm --filter=n8n-playwright test:container:multi-main:e2e' }}
|
||||
workers: '1'
|
||||
pre-generated-matrix: ${{ needs.install-and-build.outputs.matrix }}
|
||||
artifact-prefix: e2e
|
||||
secrets: inherit
|
||||
|
||||
# Boots the editor-ui against the Vite dev server and fails on any console
|
||||
# or page error during load. Catches regressions in dev-mode module
|
||||
# resolution (missing Vite alias, broken workspace package interop) that
|
||||
# the production-bundle e2e job bundles around.
|
||||
dev-server-smoke:
|
||||
name: Dev-server boot smoke
|
||||
needs: install-and-build
|
||||
if: needs.install-and-build.outputs.dev_server_smoke == 'true' && github.event_name != 'merge_group'
|
||||
uses: ./.github/workflows/test-dev-server-smoke-reusable.yml
|
||||
with:
|
||||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
db-tests:
|
||||
name: DB Tests
|
||||
needs: install-and-build
|
||||
|
|
@ -256,25 +135,6 @@ jobs:
|
|||
with:
|
||||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
|
||||
performance:
|
||||
name: Performance
|
||||
needs: install-and-build
|
||||
if: needs.install-and-build.outputs.performance == 'true' && github.event_name != 'merge_group'
|
||||
uses: ./.github/workflows/test-bench-reusable.yml
|
||||
with:
|
||||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
|
||||
e2e-performance:
|
||||
name: E2E Performance
|
||||
needs: [install-and-build, prepare-docker]
|
||||
# Performance is internal-only (license secrets required, not available on forks).
|
||||
if: >-
|
||||
needs.prepare-docker.result == 'success' &&
|
||||
needs.install-and-build.outputs.e2e_performance == 'true' &&
|
||||
github.event.pull_request.head.repo.fork != true
|
||||
uses: ./.github/workflows/test-e2e-performance-reusable.yml
|
||||
secrets: inherit
|
||||
|
||||
security-checks:
|
||||
name: Security Checks
|
||||
needs: install-and-build
|
||||
|
|
@ -293,40 +153,6 @@ jobs:
|
|||
ref: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
# Depends on prepare-docker so the eval workflow can load the SHA-keyed image cache.
|
||||
# prepare-docker may be skipped (its filter excludes .github/**); the eval falls back to a local build.
|
||||
instance-ai-workflow-evals:
|
||||
name: Instance AI Workflow Evals
|
||||
needs: [install-and-build, prepare-docker]
|
||||
if: >-
|
||||
!cancelled() &&
|
||||
needs.install-and-build.result == 'success' &&
|
||||
(needs.prepare-docker.result == 'success' || needs.prepare-docker.result == 'skipped') &&
|
||||
needs.install-and-build.outputs.instance_ai_workflow_eval == 'true' &&
|
||||
github.repository == 'n8n-io/n8n' &&
|
||||
(github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork)
|
||||
uses: ./.github/workflows/test-evals-instance-ai.yml
|
||||
with:
|
||||
branch: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
# In-process discovery eval — asserts the orchestrator reaches for browser/computer-use
|
||||
# tools at OAuth/screenshot moments. Lightweight (no Docker), runs in parallel with the
|
||||
# heavy workflow eval. Non-blocking initially; promote to required after stability.
|
||||
instance-ai-discovery-evals:
|
||||
name: Instance AI Discovery Evals
|
||||
needs: install-and-build
|
||||
if: >-
|
||||
!cancelled() &&
|
||||
needs.install-and-build.result == 'success' &&
|
||||
needs.install-and-build.outputs.instance_ai_workflow_eval == 'true' &&
|
||||
github.repository == 'n8n-io/n8n' &&
|
||||
(github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork)
|
||||
uses: ./.github/workflows/test-evals-discovery.yml
|
||||
with:
|
||||
branch: ${{ needs.install-and-build.outputs.commit_sha }}
|
||||
secrets: inherit
|
||||
|
||||
# This job is required by GitHub branch protection rules.
|
||||
# PRs cannot be merged unless this job passes.
|
||||
required-checks:
|
||||
|
|
@ -337,12 +163,8 @@ jobs:
|
|||
unit-test,
|
||||
typecheck,
|
||||
lint,
|
||||
check-packaging,
|
||||
sqlite-sanity,
|
||||
e2e,
|
||||
dev-server-smoke,
|
||||
e2e-tests,
|
||||
db-tests,
|
||||
performance,
|
||||
security-checks,
|
||||
workflow-scripts,
|
||||
]
|
||||
|
|
@ -358,14 +180,3 @@ jobs:
|
|||
with:
|
||||
mode: validate
|
||||
job-results: ${{ toJSON(needs) }}
|
||||
|
||||
# Posts a QA metrics comparison comment on the PR.
|
||||
# Runs after all checks so any job can emit metrics before this reports.
|
||||
post-qa-metrics-comment:
|
||||
name: QA Metrics
|
||||
needs: [required-checks, e2e-performance]
|
||||
if: always()
|
||||
uses: ./.github/workflows/util-qa-metrics-comment-reusable.yml
|
||||
with:
|
||||
metrics: memory-heap-used-baseline,memory-rss-baseline,instance-ai-heap-used-baseline,instance-ai-rss-baseline,docker-image-size-n8n,docker-image-size-runners
|
||||
secrets: inherit
|
||||
|
|
|
|||
41
.github/workflows/docker-build-push.yml
vendored
41
.github/workflows/docker-build-push.yml
vendored
|
|
@ -7,7 +7,7 @@ name: 'Docker: Build and Push'
|
|||
|
||||
env:
|
||||
NODE_OPTIONS: '--max-old-space-size=7168'
|
||||
NODE_VERSION: '24.14.1'
|
||||
NODE_VERSION: '24.13.1'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
|
|
@ -83,9 +83,6 @@ jobs:
|
|||
primary_ghcr_manifest_tag: ${{ steps.determine-tags.outputs.n8n_primary_tag }}
|
||||
runners_primary_ghcr_manifest_tag: ${{ steps.determine-tags.outputs.runners_primary_tag }}
|
||||
runners_distroless_primary_ghcr_manifest_tag: ${{ steps.determine-tags.outputs.runners_distroless_primary_tag }}
|
||||
n8n_sha_manifest_tag: ${{ steps.determine-tags.outputs.n8n_sha_primary_tag }}
|
||||
runners_sha_manifest_tag: ${{ steps.determine-tags.outputs.runners_sha_primary_tag }}
|
||||
runners_distroless_sha_manifest_tag: ${{ steps.determine-tags.outputs.runners_distroless_sha_primary_tag }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
|
@ -108,7 +105,6 @@ jobs:
|
|||
--all \
|
||||
--version "${{ needs.determine-build-context.outputs.n8n_version }}" \
|
||||
--platform "${{ matrix.docker_platform }}" \
|
||||
--sha "${GITHUB_SHA::7}" \
|
||||
${{ needs.determine-build-context.outputs.push_to_docker == 'true' && '--include-docker' || '' }}
|
||||
|
||||
echo "=== Generated Docker Tags ==="
|
||||
|
|
@ -236,11 +232,6 @@ jobs:
|
|||
create_manifest "runners" "${{ needs.build-and-push-docker.outputs.runners_primary_ghcr_manifest_tag }}"
|
||||
create_manifest "runners-distroless" "${{ needs.build-and-push-docker.outputs.runners_distroless_primary_ghcr_manifest_tag }}"
|
||||
|
||||
# Create SHA-tagged manifests (immutable references for deployments)
|
||||
create_manifest "n8n (sha)" "${{ needs.build-and-push-docker.outputs.n8n_sha_manifest_tag }}"
|
||||
create_manifest "runners (sha)" "${{ needs.build-and-push-docker.outputs.runners_sha_manifest_tag }}"
|
||||
create_manifest "runners-distroless (sha)" "${{ needs.build-and-push-docker.outputs.runners_distroless_sha_manifest_tag }}"
|
||||
|
||||
- name: Create Docker Hub manifests
|
||||
if: needs.determine-build-context.outputs.push_to_docker == 'true'
|
||||
run: |
|
||||
|
|
@ -254,8 +245,6 @@ jobs:
|
|||
["runners-distroless"]="${VERSION}-distroless"
|
||||
)
|
||||
|
||||
SHORT_SHA="${GITHUB_SHA::7}"
|
||||
|
||||
for image in "${!images[@]}"; do
|
||||
TAG_SUFFIX="${images[$image]}"
|
||||
IMAGE_NAME="${image//-distroless/}" # Remove -distroless from image name
|
||||
|
|
@ -265,20 +254,6 @@ jobs:
|
|||
--tag "${DOCKER_BASE}/${IMAGE_NAME}:${TAG_SUFFIX}" \
|
||||
"${DOCKER_BASE}/${IMAGE_NAME}:${TAG_SUFFIX}-amd64" \
|
||||
"${DOCKER_BASE}/${IMAGE_NAME}:${TAG_SUFFIX}-arm64"
|
||||
|
||||
# Create SHA-tagged manifest (immutable reference)
|
||||
# For distroless, insert SHA between version and -distroless suffix
|
||||
# to match docker-tags.mjs format: nightly-abc1234-distroless (not nightly-distroless-abc1234)
|
||||
if [[ "$image" == *"-distroless"* ]]; then
|
||||
SHA_SUFFIX="${VERSION}-${SHORT_SHA}-distroless"
|
||||
else
|
||||
SHA_SUFFIX="${TAG_SUFFIX}-${SHORT_SHA}"
|
||||
fi
|
||||
echo "Creating Docker Hub SHA manifest for $image: ${SHA_SUFFIX}"
|
||||
docker buildx imagetools create \
|
||||
--tag "${DOCKER_BASE}/${IMAGE_NAME}:${SHA_SUFFIX}" \
|
||||
"${DOCKER_BASE}/${IMAGE_NAME}:${SHA_SUFFIX}-amd64" \
|
||||
"${DOCKER_BASE}/${IMAGE_NAME}:${SHA_SUFFIX}-arm64"
|
||||
done
|
||||
|
||||
- name: Get manifest digests for attestation
|
||||
|
|
@ -304,6 +279,7 @@ jobs:
|
|||
curl -v "${{ env.SUCCESS_URL }}" || echo "Failed to call success URL"
|
||||
shell: bash
|
||||
|
||||
# SLSA L3 Provenance - Must use version tags (@vX.Y.Z), NOT SHAs
|
||||
provenance-n8n:
|
||||
name: SLSA Provenance (n8n)
|
||||
needs: [determine-build-context, build-and-push-docker, create_multi_arch_manifest]
|
||||
|
|
@ -314,7 +290,6 @@ jobs:
|
|||
id-token: write
|
||||
packages: write
|
||||
actions: read
|
||||
# SLSA L3 Provenance - Must use version tags (@vX.Y.Z), NOT SHAs
|
||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.1.0
|
||||
with:
|
||||
image: ${{ needs.create_multi_arch_manifest.outputs.n8n_image }}
|
||||
|
|
@ -333,7 +308,6 @@ jobs:
|
|||
id-token: write
|
||||
packages: write
|
||||
actions: read
|
||||
# SLSA L3 Provenance - Must use version tags (@vX.Y.Z), NOT SHAs
|
||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.1.0
|
||||
with:
|
||||
image: ${{ needs.create_multi_arch_manifest.outputs.runners_image }}
|
||||
|
|
@ -352,7 +326,6 @@ jobs:
|
|||
id-token: write
|
||||
packages: write
|
||||
actions: read
|
||||
# SLSA L3 Provenance - Must use version tags (@vX.Y.Z), NOT SHAs
|
||||
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.1.0
|
||||
with:
|
||||
image: ${{ needs.create_multi_arch_manifest.outputs.runners_distroless_image }}
|
||||
|
|
@ -364,15 +337,7 @@ jobs:
|
|||
# VEX Attestation - Documents which CVEs affect us (security/vex.openvex.json)
|
||||
vex-attestation:
|
||||
name: VEX Attestation
|
||||
needs:
|
||||
[
|
||||
determine-build-context,
|
||||
build-and-push-docker,
|
||||
create_multi_arch_manifest,
|
||||
provenance-n8n,
|
||||
provenance-runners,
|
||||
provenance-runners-distroless,
|
||||
]
|
||||
needs: [determine-build-context, build-and-push-docker, create_multi_arch_manifest, provenance-n8n, provenance-runners, provenance-runners-distroless]
|
||||
if: |
|
||||
always() &&
|
||||
needs.create_multi_arch_manifest.result == 'success' &&
|
||||
|
|
|
|||
49
.github/workflows/prepare-docker-reusable.yml
vendored
49
.github/workflows/prepare-docker-reusable.yml
vendored
|
|
@ -1,49 +0,0 @@
|
|||
name: 'Prepare n8n Docker (reusable)'
|
||||
|
||||
# Reusable workflow that ensures the n8n + runners CI test images for the
|
||||
# current commit SHA are present in the GHA cache. Cache-aware: if another
|
||||
# job in the same run already populated `n8n-docker-image-<sha>`, this
|
||||
# becomes a no-op.
|
||||
#
|
||||
# Downstream jobs restore the same SHA-keyed cache via load-n8n-docker.
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
build-variant:
|
||||
description: 'standard or coverage'
|
||||
required: false
|
||||
default: 'standard'
|
||||
type: string
|
||||
runner:
|
||||
description: 'Runner for the build.'
|
||||
required: false
|
||||
default: 'blacksmith-4vcpu-ubuntu-2204'
|
||||
type: string
|
||||
branch:
|
||||
description: 'Git ref to check out.'
|
||||
required: false
|
||||
default: ''
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: 'Build & publish image'
|
||||
runs-on: ${{ inputs.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: ${{ inputs.branch || github.ref }}
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Build and publish image
|
||||
uses: ./.github/actions/build-n8n-docker
|
||||
with:
|
||||
build-variant: ${{ inputs.build-variant }}
|
||||
env:
|
||||
QA_METRICS_WEBHOOK_URL: ${{ secrets.QA_METRICS_WEBHOOK_URL }}
|
||||
QA_METRICS_WEBHOOK_USER: ${{ secrets.QA_METRICS_WEBHOOK_USER }}
|
||||
QA_METRICS_WEBHOOK_PASSWORD: ${{ secrets.QA_METRICS_WEBHOOK_PASSWORD }}
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
name: 'Release: Build Daytona snapshot'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
n8n_version:
|
||||
description: 'n8n version to build the Daytona snapshot for'
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
DAYTONA_API_KEY:
|
||||
required: true
|
||||
DAYTONA_API_URL:
|
||||
required: false
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
n8n_version:
|
||||
description: 'n8n version to build the Daytona snapshot for (e.g. 1.123.0)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build-snapshot:
|
||||
name: Build versioned Daytona snapshot
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup Node.js and build
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
|
||||
- name: Build versioned Daytona snapshot
|
||||
env:
|
||||
N8N_VERSION: ${{ inputs.n8n_version }}
|
||||
DAYTONA_API_KEY: ${{ secrets.DAYTONA_API_KEY }}
|
||||
DAYTONA_API_URL: ${{ secrets.DAYTONA_API_URL }}
|
||||
run: node packages/@n8n/instance-ai/scripts/build-snapshot.cjs --version "$N8N_VERSION"
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
name: 'Release: Create GitHub Releases'
|
||||
run-name: 'Creating GitHub Releases for ${{ inputs.version-tag }} (${{ inputs.track }})'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
track:
|
||||
required: true
|
||||
type: string
|
||||
version-tag:
|
||||
required: true
|
||||
type: string
|
||||
body:
|
||||
required: true
|
||||
type: string
|
||||
commit:
|
||||
required: true
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
track:
|
||||
description: 'Release Track'
|
||||
required: true
|
||||
type: choice
|
||||
options: [stable, beta, v1]
|
||||
version-tag:
|
||||
description: 'Version tag (e.g. n8n@2.7.0).'
|
||||
required: true
|
||||
type: string
|
||||
body:
|
||||
description: 'Release notes body.'
|
||||
required: true
|
||||
type: string
|
||||
commit:
|
||||
description: 'Commitish the release points to (e.g. branch name or SHA).'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
create-github-releases:
|
||||
name: Create GitHub releases
|
||||
runs-on: ubuntu-slim
|
||||
environment: release
|
||||
|
||||
steps:
|
||||
- name: Generate GitHub App Token
|
||||
id: generate_token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
|
||||
with:
|
||||
app-id: ${{ secrets.N8N_ASSISTANT_APP_ID }}
|
||||
private-key: ${{ secrets.N8N_ASSISTANT_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Create GitHub releases
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
RELEASE_TAG: ${{ inputs.version-tag }}
|
||||
BODY: ${{ inputs.body }}
|
||||
IS_PRE_RELEASE: ${{ inputs.track == 'beta' }}
|
||||
MAKE_LATEST: ${{ inputs.track == 'stable' }}
|
||||
COMMIT: ${{ inputs.commit }}
|
||||
ADDITIONAL_TAGS: ${{ inputs.track }}
|
||||
run: node ./.github/scripts/create-github-release.mjs
|
||||
19
.github/workflows/release-create-minor-pr.yml
vendored
19
.github/workflows/release-create-minor-pr.yml
vendored
|
|
@ -2,8 +2,8 @@ name: 'Release: Create Minor Release PR'
|
|||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: 0 8 * * 2 # 9am CET (UTC+1), Tuesday
|
||||
# schedule:
|
||||
# - cron: 0 13 * * 1
|
||||
|
||||
jobs:
|
||||
create-release-pr:
|
||||
|
|
@ -13,18 +13,3 @@ jobs:
|
|||
with:
|
||||
base-branch: master
|
||||
release-type: minor
|
||||
|
||||
notify-slack:
|
||||
name: Notify Slack
|
||||
needs: [create-release-pr]
|
||||
if: needs.create-release-pr.result == 'success' && needs.create-release-pr.outputs.pull-request-number != ''
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Post to Slack
|
||||
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
|
||||
with:
|
||||
method: chat.postMessage
|
||||
token: ${{ secrets.RELEASE_HELPER_SLACK_TOKEN }}
|
||||
payload: |
|
||||
channel: C036AELNMV0
|
||||
text: ":rocket: Minor release PR created. <${{ github.server_url }}/${{ github.repository }}/pull/${{ needs.create-release-pr.outputs.pull-request-number }}|View PR> — close it to cancel the release."
|
||||
|
|
|
|||
22
.github/workflows/release-create-patch-pr.yml
vendored
22
.github/workflows/release-create-patch-pr.yml
vendored
|
|
@ -2,13 +2,6 @@ name: 'Release: Create Patch Release PR'
|
|||
run-name: 'Release: Create Patch Release PR for track ${{ inputs.track }}'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
track:
|
||||
description: 'Release Track'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
track:
|
||||
|
|
@ -60,18 +53,3 @@ jobs:
|
|||
with:
|
||||
base-branch: ${{ needs.determine-version-info.outputs.release_candidate_branch }}
|
||||
release-type: patch
|
||||
|
||||
notify-slack:
|
||||
name: Notify Slack
|
||||
needs: [create-release-pr]
|
||||
if: needs.create-release-pr.result == 'success' && needs.create-release-pr.outputs.pull-request-number != ''
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Post to Slack
|
||||
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
|
||||
with:
|
||||
method: chat.postMessage
|
||||
token: ${{ secrets.RELEASE_HELPER_SLACK_TOKEN }}
|
||||
payload: |
|
||||
channel: C036AELNMV0
|
||||
text: ":rocket: Patch release PR created for *${{ inputs.track }}* track. <${{ github.server_url }}/${{ github.repository }}/pull/${{ needs.create-release-pr.outputs.pull-request-number }}|View PR> — close it to cancel the release."
|
||||
|
|
|
|||
5
.github/workflows/release-create-pr.yml
vendored
5
.github/workflows/release-create-pr.yml
vendored
|
|
@ -13,11 +13,6 @@ on:
|
|||
required: true
|
||||
type: string
|
||||
|
||||
outputs:
|
||||
pull-request-number:
|
||||
description: 'Number of the created pull request'
|
||||
value: ${{ jobs.create-release-pr.outputs.pull-request-number }}
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
base-branch:
|
||||
|
|
|
|||
|
|
@ -1,50 +0,0 @@
|
|||
name: 'Release: Promote GitHub Releases'
|
||||
run-name: 'Promoting GitHub Release ${{ inputs.version-tag }} to latest'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
version-tag:
|
||||
required: true
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version-tag:
|
||||
description: 'Version tag (e.g. n8n@2.7.0).'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
promote-github-releases:
|
||||
name: Promote GitHub release to latest
|
||||
runs-on: ubuntu-slim
|
||||
environment: release
|
||||
|
||||
steps:
|
||||
- name: Generate GitHub App Token
|
||||
id: generate_token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
|
||||
with:
|
||||
app-id: ${{ secrets.N8N_ASSISTANT_APP_ID }}
|
||||
private-key: ${{ secrets.N8N_ASSISTANT_PRIVATE_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Promote GitHub releases
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
RELEASE_TAG: ${{ inputs.version-tag }}
|
||||
run: node ./.github/scripts/promote-github-release.mjs
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
name: 'Release: Publish New Package'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
package-path:
|
||||
description: 'Path to the package to publish (e.g. packages/@n8n/my-new-package)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: release-new-package-${{ github.event.inputs.package-path }}
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
publish-to-npm:
|
||||
name: Publish to NPM
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
environment: release
|
||||
|
||||
steps:
|
||||
- name: Check branch
|
||||
if: github.ref != 'refs/heads/master'
|
||||
run: |
|
||||
echo "::error::This workflow can only be run from the master branch"
|
||||
exit 1
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Setup and Build
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
|
||||
- name: Check package does not already exist on NPM
|
||||
working-directory: ${{ github.event.inputs.package-path }}
|
||||
run: |
|
||||
PACKAGE_NAME=$(node -p "require('./package.json').name")
|
||||
if [ "$PACKAGE_NAME" = "n8n-monorepo" ]; then
|
||||
echo "::error::Package 'n8n-monorepo' cannot be published."
|
||||
exit 1
|
||||
fi
|
||||
if npm view "$PACKAGE_NAME" > /dev/null 2>&1; then
|
||||
echo "::error::Package '$PACKAGE_NAME' already exists on NPM. Use the regular release workflow for updates."
|
||||
exit 1
|
||||
fi
|
||||
echo "Package '$PACKAGE_NAME' does not exist on NPM yet. Proceeding with publish."
|
||||
|
||||
- name: Configure NPM token
|
||||
run: echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_DIST_TAG_AND_INITIAL_PUBLISH_TOKEN }}" > ~/.npmrc
|
||||
|
||||
- name: Publish package
|
||||
working-directory: ${{ github.event.inputs.package-path }}
|
||||
run: pnpm publish --access public --no-git-checks
|
||||
|
|
@ -49,16 +49,6 @@ jobs:
|
|||
version: ${{ inputs.new_stable_version }}
|
||||
release-channel: stable
|
||||
|
||||
promote-previous-minor-github-release-to-latest:
|
||||
name: Promote previous minor Github Release to latest
|
||||
if: |
|
||||
inputs.release_type != 'rc' &&
|
||||
inputs.bump == 'minor'
|
||||
uses: ./.github/workflows/release-promote-github-release.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
version-tag: 'n8n@${{ inputs.new_stable_version }}'
|
||||
|
||||
ensure-release-candidate-branches:
|
||||
name: 'Ensure release candidate branches'
|
||||
if: |
|
||||
|
|
@ -66,14 +56,6 @@ jobs:
|
|||
uses: ./.github/workflows/util-ensure-release-candidate-branches.yml
|
||||
secrets: inherit
|
||||
|
||||
ensure-correct-latest-version-on-npm:
|
||||
name: Ensure correct latest version on npm
|
||||
if: |
|
||||
inputs.bump == 'minor' ||
|
||||
inputs.track == 'stable'
|
||||
uses: ./.github/workflows/release-set-stable-npm-packages-to-latest.yml
|
||||
secrets: inherit
|
||||
|
||||
populate-cloud-with-releases:
|
||||
name: 'Populate cloud database with releases'
|
||||
uses: ./.github/workflows/release-populate-cloud-with-releases.yml
|
||||
|
|
|
|||
60
.github/workflows/release-publish.yml
vendored
60
.github/workflows/release-publish.yml
vendored
|
|
@ -76,13 +76,15 @@ jobs:
|
|||
cp README.md packages/cli/README.md
|
||||
sed -i "s/default: 'dev'/default: '${{ needs.determine-version-info.outputs.release_type }}'/g" packages/cli/dist/config/schema.js
|
||||
|
||||
# Publishing via `pnpm publish -r` is idempotent, as it checks if the package exists
|
||||
# and only publishes if it doesn't. This is why we do the sub-packages before the main n8n package.
|
||||
# So if anything goes wrong, we can easily re-try the run instead of abandoning the release.
|
||||
- name: Publish n8n to NPM with rc tag
|
||||
env:
|
||||
PUBLISH_BRANCH: ${{ github.event.pull_request.base.ref }}
|
||||
run: pnpm --filter n8n publish --publish-branch "$PUBLISH_BRANCH" --access public --tag rc --no-git-checks
|
||||
|
||||
- name: Publish other packages to NPM
|
||||
env:
|
||||
PUBLISH_BRANCH: ${{ github.event.pull_request.base.ref }}
|
||||
PUBLISH_TAG: ${{ needs.determine-version-info.outputs.track }}
|
||||
PUBLISH_TAG: ${{ needs.determine-version-info.outputs.track == 'stable' && 'latest' || needs.determine-version-info.outputs.track }}
|
||||
run: |
|
||||
# Prefix version-like track names (e.g. "1", "v1") to avoid npm rejecting them as semver ranges
|
||||
if [[ "$PUBLISH_TAG" =~ ^v?[0-9] ]]; then
|
||||
|
|
@ -90,12 +92,6 @@ jobs:
|
|||
fi
|
||||
pnpm publish -r --filter '!n8n' --publish-branch "$PUBLISH_BRANCH" --access public --tag "$PUBLISH_TAG" --no-git-checks
|
||||
|
||||
# If we don't use the --tag rc, all releases will default to "latest".
|
||||
- name: Publish n8n to NPM with rc tag
|
||||
env:
|
||||
PUBLISH_BRANCH: ${{ github.event.pull_request.base.ref }}
|
||||
run: pnpm --filter n8n publish --publish-branch "$PUBLISH_BRANCH" --access public --tag rc --no-git-checks
|
||||
|
||||
- name: Cleanup rc tag
|
||||
run: npm dist-tag rm n8n rc
|
||||
continue-on-error: true
|
||||
|
|
@ -109,26 +105,34 @@ jobs:
|
|||
release_type: ${{ needs.determine-version-info.outputs.release_type }}
|
||||
secrets: inherit
|
||||
|
||||
build-daytona-snapshot:
|
||||
name: Build Daytona snapshot
|
||||
needs: [determine-version-info, publish-to-npm]
|
||||
if: github.event.pull_request.merged == true
|
||||
uses: ./.github/workflows/release-build-daytona-snapshot.yml
|
||||
with:
|
||||
n8n_version: ${{ needs.determine-version-info.outputs.version }}
|
||||
secrets: inherit
|
||||
|
||||
create-github-release:
|
||||
name: Create GitHub Release
|
||||
name: Create a GitHub Release
|
||||
needs: [determine-version-info, publish-to-npm, publish-to-docker-hub]
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.pull_request.merged == true
|
||||
uses: ./.github/workflows/release-create-github-releases.yml
|
||||
with:
|
||||
track: ${{ needs.determine-version-info.outputs.track }}
|
||||
version-tag: 'n8n@${{ needs.determine-version-info.outputs.version }}'
|
||||
body: ${{ github.event.pull_request.body }}
|
||||
commit: ${{ github.event.pull_request.base.ref }}
|
||||
secrets: inherit
|
||||
timeout-minutes: 5
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Generate GitHub App Token
|
||||
id: generate_token
|
||||
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
|
||||
with:
|
||||
app-id: ${{ secrets.N8N_ASSISTANT_APP_ID }}
|
||||
private-key: ${{ secrets.N8N_ASSISTANT_PRIVATE_KEY }}
|
||||
|
||||
- name: Create a Release on GitHub
|
||||
uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0
|
||||
with:
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
commit: ${{github.event.pull_request.base.ref}}
|
||||
tag: 'n8n@${{ needs.determine-version-info.outputs.version }}'
|
||||
prerelease: ${{ needs.determine-version-info.outputs.track == 'beta' }}
|
||||
makeLatest: ${{ needs.determine-version-info.outputs.track == 'stable' }}
|
||||
body: ${{github.event.pull_request.body}}
|
||||
|
||||
move-track-tag:
|
||||
name: Move track tag
|
||||
|
|
@ -196,13 +200,11 @@ jobs:
|
|||
create-github-release,
|
||||
move-track-tag,
|
||||
promote-stable-tag,
|
||||
build-daytona-snapshot,
|
||||
]
|
||||
if: |
|
||||
always() &&
|
||||
needs.publish-to-npm.result == 'success' &&
|
||||
needs.create-github-release.result == 'success' &&
|
||||
needs.build-daytona-snapshot.result == 'success' &&
|
||||
(needs.move-track-tag.result == 'success' || needs.move-track-tag.result == 'skipped') &&
|
||||
(needs.promote-stable-tag.result == 'success' || needs.promote-stable-tag.result == 'skipped')
|
||||
uses: ./.github/workflows/release-publish-post-release.yml
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
with:
|
||||
node-version: 24.14.1
|
||||
node-version: 24.13.1
|
||||
|
||||
# Remove after https://github.com/npm/cli/issues/8547 gets resolved
|
||||
- run: echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc
|
||||
|
|
|
|||
17
.github/workflows/release-schedule-patch-prs.yml
vendored
17
.github/workflows/release-schedule-patch-prs.yml
vendored
|
|
@ -1,17 +0,0 @@
|
|||
name: 'Release: Schedule Patch Release PRs'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 8 * * 3-5,1' # 9am CET (UTC+1), Wednesday - Friday and Monday. (Minor release on tuesday)
|
||||
|
||||
jobs:
|
||||
create-patch-prs:
|
||||
name: Create patch release PR (${{ matrix.track }})
|
||||
strategy:
|
||||
matrix:
|
||||
track: [stable, beta, v1]
|
||||
uses: ./.github/workflows/release-create-patch-pr.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
track: ${{ matrix.track }}
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
name: 'Release: Set stable npm packages to latest'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
promote-github-releases:
|
||||
name: Promote current stable releases as latest
|
||||
runs-on: ubuntu-slim
|
||||
environment: release
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
ref: refs/tags/stable
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: ./.github/actions/setup-nodejs
|
||||
with:
|
||||
build-command: ''
|
||||
install-command: pnpm install --frozen-lockfile --dir ./.github/scripts --ignore-workspace
|
||||
|
||||
- name: Set npm packages to latest
|
||||
run: node ./.github/scripts/set-latest-for-monorepo-packages.mjs
|
||||
env:
|
||||
NPM_TOKEN: ${{ secrets.NPM_DIST_TAG_AND_INITIAL_PUBLISH_TOKEN }}
|
||||
|
|
@ -56,7 +56,7 @@ jobs:
|
|||
output-file: sbom-source.cdx.json
|
||||
|
||||
- name: Attest SBOM for source release
|
||||
uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4.1.0
|
||||
uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0
|
||||
with:
|
||||
subject-path: './package.json'
|
||||
sbom-path: 'sbom-source.cdx.json'
|
||||
|
|
|
|||
3
.github/workflows/test-bench-reusable.yml
vendored
3
.github/workflows/test-bench-reusable.yml
vendored
|
|
@ -16,9 +16,6 @@ on:
|
|||
type: string
|
||||
default: ''
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
|
||||
jobs:
|
||||
bench:
|
||||
name: Benchmarks
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user