feat(OpenAI Chat Model Node): Add OpenAI account OAuth support

This commit is contained in:
Etienne Lescot 2026-04-27 10:53:39 +02:00
parent be4ef22533
commit a9fe2c46b7
26 changed files with 3260 additions and 60 deletions

View File

@ -9,6 +9,12 @@ import {
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import {
getOpenAiApiKey,
getOpenAiCredentialType,
OPEN_AI_API_CREDENTIAL_TYPE,
OPEN_AI_OAUTH2_CREDENTIAL_TYPE,
} from 'n8n-nodes-base/dist/credentials/OpenAiApi.credentials';
import { checkDomainRestrictions } from '@utils/checkDomainRestrictions';
import { mergeCustomHeaders } from '@utils/helpers';
@ -22,10 +28,14 @@ import {
} from '@n8n/ai-utilities';
import { formatBuiltInTools, prepareAdditionalResponsesParams } from './common';
import { searchModels } from './methods/loadModels';
import { OpenAiAccountChatModel } from './OpenAiAccountChatModel';
import type { ModelOptions } from './types';
import { Container } from '@n8n/di';
import { AiConfig } from '@n8n/config';
const OPENAI_API_DEFAULT_MODEL = 'gpt-5-mini';
const OPENAI_ACCOUNT_DEFAULT_MODEL = 'gpt-5.4-mini';
const INCLUDE_JSON_WARNING: INodeProperties = {
displayName:
'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',
@ -39,6 +49,30 @@ const OPENAI_MODEL_BUILDER_HINT = {
'Prefer the GPT-5.4 family: the flagship variant (e.g. `gpt-5.4`) for general use, a `-mini` / `-nano` variant when the task explicitly calls for cost-efficiency, or `-pro` only when the user asks for maximum capability. Never use gpt-4o, gpt-4-turbo, gpt-4, gpt-3.5, or earlier — those are superseded by the GPT-5 family and are not valid choices.',
};
/**
 * Type guard narrowing an arbitrary value to the reasoning-effort levels
 * accepted by the OpenAI account (Codex) backend.
 */
function isOpenAiAccountReasoningEffort(value: unknown): value is 'low' | 'medium' | 'high' {
	const levels: readonly unknown[] = ['low', 'medium', 'high'];
	return levels.includes(value);
}
/**
 * Wraps a property's displayOptions so the property is hidden whenever the
 * node-level `authentication` parameter selects the OpenAI account OAuth
 * flow. Any pre-existing show/hide conditions are preserved.
 */
function hideForOpenAiAccountAuth(
	displayOptions: INodeProperties['displayOptions'] = {},
): INodeProperties['displayOptions'] {
	const hide = { ...displayOptions.hide, '/authentication': ['oAuth2'] };
	return { ...displayOptions, hide };
}
/**
 * Normalizes the user-supplied "Maximum Number of Tokens" option for the
 * OpenAI account backend.
 *
 * @param value Raw option value (may be unset or arbitrary user input).
 * @returns A strictly positive, finite token budget, or `undefined` when the
 *   option is absent/invalid so the backend default applies.
 */
function normalizeOpenAiAccountMaxOutputTokens(value: unknown): number | undefined {
	// Number.isFinite also rejects Infinity, which `value > 0` alone would
	// let through and send to the API as max_output_tokens.
	return typeof value === 'number' && Number.isFinite(value) && value > 0 ? value : undefined;
}
/**
 * Normalizes a model-name parameter value.
 *
 * @param value Raw parameter value.
 * @returns The trimmed model identifier, or `undefined` when the value is
 *   not a non-blank string, so callers can fall back to a default model.
 */
function normalizeModelName(value: unknown): string | undefined {
	if (typeof value !== 'string') return undefined;
	// Return the trimmed ID: the original validated on trim() but returned
	// the raw string, letting a whitespace-padded manual entry through.
	const trimmed = value.trim();
	return trimmed.length > 0 ? trimmed : undefined;
}
const completionsResponseFormat: INodeProperties = {
displayName: 'Response Format',
name: 'responseFormat',
@ -109,8 +143,22 @@ export class LmChatOpenAi implements INodeType {
outputNames: ['Model'],
credentials: [
{
name: 'openAiApi',
name: OPEN_AI_API_CREDENTIAL_TYPE,
required: true,
displayOptions: {
show: {
authentication: ['apiKey'],
},
},
},
{
name: OPEN_AI_OAUTH2_CREDENTIAL_TYPE,
required: true,
displayOptions: {
show: {
authentication: ['oAuth2'],
},
},
},
],
requestDefaults: {
@ -120,6 +168,23 @@ export class LmChatOpenAi implements INodeType {
},
properties: [
getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Authentication',
name: 'authentication',
type: 'options',
default: 'apiKey',
options: [
{
name: 'API Key',
value: 'apiKey',
},
{
name: 'OpenAI Account',
value: 'oAuth2',
description: 'Connect a ChatGPT/OpenAI account without an API key',
},
],
},
{
...INCLUDE_JSON_WARNING,
displayOptions: {
@ -195,11 +260,12 @@ export class LmChatOpenAi implements INodeType {
property: 'model',
},
},
default: 'gpt-5-mini',
default: OPENAI_API_DEFAULT_MODEL,
builderHint: OPENAI_MODEL_BUILDER_HINT,
displayOptions: {
hide: {
'@version': [{ _cnd: { gte: 1.2 } }],
show: {
'@version': [{ _cnd: { lte: 1.1 } }],
'/authentication': ['apiKey'],
},
},
},
@ -207,7 +273,7 @@ export class LmChatOpenAi implements INodeType {
displayName: 'Model',
name: 'model',
type: 'resourceLocator',
default: { mode: 'list', value: 'gpt-5-mini' },
default: { mode: 'list', value: OPENAI_API_DEFAULT_MODEL },
builderHint: OPENAI_MODEL_BUILDER_HINT,
required: true,
modes: [
@ -225,13 +291,46 @@ export class LmChatOpenAi implements INodeType {
displayName: 'ID',
name: 'id',
type: 'string',
placeholder: 'gpt-5-mini',
placeholder: OPENAI_API_DEFAULT_MODEL,
},
],
description: 'The model. Choose from the list, or specify an ID.',
displayOptions: {
hide: {
'@version': [{ _cnd: { lte: 1.1 } }],
show: {
'@version': [{ _cnd: { gte: 1.2 } }],
'/authentication': ['apiKey'],
},
},
},
{
displayName: 'Model',
name: 'openAiAccountModel',
type: 'resourceLocator',
default: { mode: 'list', value: OPENAI_ACCOUNT_DEFAULT_MODEL },
builderHint: OPENAI_MODEL_BUILDER_HINT,
required: true,
modes: [
{
displayName: 'From List',
name: 'list',
type: 'list',
placeholder: 'Select a model...',
typeOptions: {
searchListMethod: 'searchModels',
searchable: true,
},
},
{
displayName: 'ID',
name: 'id',
type: 'string',
placeholder: OPENAI_ACCOUNT_DEFAULT_MODEL,
},
],
description: 'The OpenAI account model. Choose from the list, or specify an ID.',
displayOptions: {
show: {
'/authentication': ['oAuth2'],
},
},
},
@ -356,12 +455,12 @@ export class LmChatOpenAi implements INodeType {
description: 'Whether to allow the model to execute code in a sandboxed environment',
},
],
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Options',
@ -377,11 +476,11 @@ export class LmChatOpenAi implements INodeType {
default: 'https://api.openai.com/v1',
description: 'Override the default base URL for the API',
type: 'string',
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
hide: {
'@version': [{ _cnd: { gte: 1.1 } }],
},
},
}),
},
{
displayName: 'Frequency Penalty',
@ -391,6 +490,7 @@ export class LmChatOpenAi implements INodeType {
description:
"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
type: 'number',
displayOptions: hideForOpenAiAccountAuth(),
},
{
displayName: 'Maximum Number of Tokens',
@ -405,20 +505,20 @@ export class LmChatOpenAi implements INodeType {
},
{
...completionsResponseFormat,
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { lt: 1.3 } }],
},
},
}),
},
{
...completionsResponseFormat,
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [false],
},
},
}),
},
{
displayName: 'Response Format',
@ -518,12 +618,12 @@ export class LmChatOpenAi implements INodeType {
],
},
],
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Presence Penalty',
@ -533,6 +633,7 @@ export class LmChatOpenAi implements INodeType {
description:
"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
type: 'number',
displayOptions: hideForOpenAiAccountAuth(),
},
{
displayName: 'Sampling Temperature',
@ -542,6 +643,7 @@ export class LmChatOpenAi implements INodeType {
description:
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
type: 'number',
displayOptions: hideForOpenAiAccountAuth(),
},
{
displayName: 'Reasoning Effort',
@ -588,6 +690,7 @@ export class LmChatOpenAi implements INodeType {
default: 2,
description: 'Maximum number of retries to attempt',
type: 'number',
displayOptions: hideForOpenAiAccountAuth(),
},
{
displayName: 'Top P',
@ -597,6 +700,7 @@ export class LmChatOpenAi implements INodeType {
description:
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
type: 'number',
displayOptions: hideForOpenAiAccountAuth(),
},
{
displayName: 'Conversation ID',
@ -605,12 +709,12 @@ export class LmChatOpenAi implements INodeType {
description:
'The conversation that this response belongs to. Input items and output items from this response are automatically added to this conversation after this response completes.',
type: 'string',
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Prompt Cache Key',
@ -619,12 +723,12 @@ export class LmChatOpenAi implements INodeType {
default: '',
description:
'Used by OpenAI to cache responses for similar requests to optimize your cache hit rates',
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Safety Identifier',
@ -633,12 +737,12 @@ export class LmChatOpenAi implements INodeType {
default: '',
description:
"A stable identifier used to help detect users of your application that may be violating OpenAI's usage policies. The IDs should be a string that uniquely identifies each user.",
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Service Tier',
@ -652,12 +756,12 @@ export class LmChatOpenAi implements INodeType {
{ name: 'Default', value: 'default' },
{ name: 'Priority', value: 'priority' },
],
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Metadata',
@ -666,12 +770,12 @@ export class LmChatOpenAi implements INodeType {
description:
'Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.',
default: '{}',
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Top Logprobs',
@ -684,12 +788,12 @@ export class LmChatOpenAi implements INodeType {
minValue: 0,
maxValue: 20,
},
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
{
displayName: 'Prompt',
@ -727,12 +831,12 @@ export class LmChatOpenAi implements INodeType {
],
},
],
displayOptions: {
displayOptions: hideForOpenAiAccountAuth({
show: {
'@version': [{ _cnd: { gte: 1.3 } }],
'/responsesApiEnabled': [true],
},
},
}),
},
],
},
@ -740,13 +844,22 @@ export class LmChatOpenAi implements INodeType {
};
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('openAiApi');
const authentication = this.getNodeParameter('authentication', itemIndex, 'apiKey');
const credentials = await this.getCredentials(getOpenAiCredentialType(authentication));
const version = this.getNode().typeVersion;
const modelName =
version >= 1.2
? (this.getNodeParameter('model.value', itemIndex) as string)
: (this.getNodeParameter('model', itemIndex) as string);
authentication === 'oAuth2'
? (normalizeModelName(
this.getNodeParameter(
'openAiAccountModel.value',
itemIndex,
OPENAI_ACCOUNT_DEFAULT_MODEL,
),
) ?? OPENAI_ACCOUNT_DEFAULT_MODEL)
: version >= 1.2
? (this.getNodeParameter('model.value', itemIndex) as string)
: (this.getNodeParameter('model', itemIndex) as string);
const responsesApiEnabled = this.getNodeParameter('responsesApiEnabled', itemIndex, false);
@ -757,6 +870,26 @@ export class LmChatOpenAi implements INodeType {
const configuration: ClientOptions = {
defaultHeaders,
};
const timeout = options.timeout;
if (authentication === 'oAuth2') {
return {
response: new OpenAiAccountChatModel({
accessToken: getOpenAiApiKey(credentials),
model: modelName,
timeout,
dispatcher: getProxyAgent('https://chatgpt.com/backend-api', {
headersTimeout: timeout,
bodyTimeout: timeout,
}),
maxOutputTokens: normalizeOpenAiAccountMaxOutputTokens(options.maxTokens),
reasoningEffort: isOpenAiAccountReasoningEffort(options.reasoningEffort)
? options.reasoningEffort
: undefined,
callbacks: [new N8nLlmTracing(this)],
}),
};
}
if (options.baseURL) {
checkDomainRestrictions(this, credentials, options.baseURL);
@ -765,7 +898,6 @@ export class LmChatOpenAi implements INodeType {
configuration.baseURL = credentials.url as string;
}
const timeout = options.timeout;
configuration.fetchOptions = {
dispatcher: getProxyAgent(configuration.baseURL ?? 'https://api.openai.com/v1', {
headersTimeout: timeout,
@ -799,7 +931,7 @@ export class LmChatOpenAi implements INodeType {
]);
const fields: ChatOpenAIFields = {
apiKey: credentials.apiKey as string,
apiKey: getOpenAiApiKey(credentials),
model: modelName,
...includedOptions,
timeout,

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +1,4 @@
import { getProxyAgent } from '@n8n/ai-utilities';
import type { ILoadOptionsFunctions } from 'n8n-workflow';
import OpenAI from 'openai';
import type { Mocked, MockedClass } from 'vitest';
@ -5,12 +6,26 @@ import type { Mocked, MockedClass } from 'vitest';
import { searchModels } from '../loadModels';
vi.mock('openai');
vi.mock('@n8n/ai-utilities');
const mockedGetProxyAgent = vi.mocked(getProxyAgent);
const JWT_ACCOUNT_CLAIM = 'https://api.openai.com/auth';

/** Builds a fake three-part JWT whose payload carries the ChatGPT account-id claim. */
function makeOpenAiAccountToken(accountId: string) {
	const claims = { [JWT_ACCOUNT_CLAIM]: { chatgpt_account_id: accountId } };
	const payload = Buffer.from(JSON.stringify(claims)).toString('base64url');
	return `header.${payload}.signature`;
}
describe('searchModels', () => {
let mockContext: Mocked<ILoadOptionsFunctions>;
let mockOpenAI: Mocked<typeof OpenAI>;
beforeEach(() => {
mockedGetProxyAgent.mockReturnValue({} as never);
mockContext = {
getCredentials: vi.fn().mockResolvedValue({
apiKey: 'test-api-key',
@ -51,6 +66,7 @@ describe('searchModels', () => {
});
afterEach(() => {
vi.restoreAllMocks();
vi.clearAllMocks();
});
@ -87,6 +103,52 @@ describe('searchModels', () => {
);
});
it('should fetch Codex account models with OAuth token-backed credentials', async () => {
const accessToken = makeOpenAiAccountToken('account-id');
const fetchSpy = vi.spyOn(global, 'fetch').mockResolvedValueOnce({
ok: true,
json: async () => ({
models: [
{ slug: 'hidden-model', visibility: 'hidden', priority: 1 },
{ slug: 'gpt-5.4-mini', visibility: 'list', priority: 2 },
{ slug: 'gpt-5.4', visibility: 'list', priority: 1 },
],
}),
} as Response);
mockContext.getCredentials.mockResolvedValueOnce({
oauthTokenData: {
access_token: accessToken,
},
url: 'https://test-url.com',
});
mockContext.getNodeParameter.mockImplementation((parameterName: string) => {
if (parameterName === 'authentication') return 'oAuth2';
return '';
});
const result = await searchModels.call(mockContext);
expect(mockContext.getCredentials).toHaveBeenCalledWith('openAiOAuth2Api');
expect(mockOpenAI).not.toHaveBeenCalled();
expect(fetchSpy).toHaveBeenCalledWith(
'https://chatgpt.com/backend-api/codex/models?client_version=1.0.0',
{
headers: expect.objectContaining({
Authorization: `Bearer ${accessToken}`,
'chatgpt-account-id': 'account-id',
}),
dispatcher: {},
},
);
expect(mockedGetProxyAgent).toHaveBeenCalledWith(
'https://chatgpt.com/backend-api/codex/models?client_version=1.0.0',
);
expect(result.results).toEqual([
{ name: 'gpt-5.4', value: 'gpt-5.4' },
{ name: 'gpt-5.4-mini', value: 'gpt-5.4-mini' },
]);
});
it('should use default OpenAI URL if no custom URL provided', async () => {
mockContext.getCredentials = vi.fn().mockResolvedValue({
apiKey: 'test-api-key',

View File

@ -1,17 +1,112 @@
import { getProxyAgent } from '@n8n/ai-utilities';
import { AiConfig } from '@n8n/config';
import { Container } from '@n8n/di';
import type { ILoadOptionsFunctions, INodeListSearchResult } from 'n8n-workflow';
import type {
ICredentialDataDecryptedObject,
ILoadOptionsFunctions,
INodeListSearchResult,
} from 'n8n-workflow';
import {
getOpenAiApiKey,
getOpenAiCredentialType,
} from 'n8n-nodes-base/dist/credentials/OpenAiApi.credentials';
import OpenAI from 'openai';
import { mergeCustomHeaders } from '../../../../utils/helpers';
import { shouldIncludeModel } from '../../../vendors/OpenAi/helpers/modelFiltering';
const OPENAI_ACCOUNT_MODELS_URL =
'https://chatgpt.com/backend-api/codex/models?client_version=1.0.0';
const JWT_ACCOUNT_CLAIM = 'https://api.openai.com/auth';
/** Narrows an unknown value to a plain (non-null, non-array) object. */
function isRecord(value: unknown): value is Record<string, unknown> {
	if (value === null || Array.isArray(value)) return false;
	return typeof value === 'object';
}
/** Checks that a model record carries a non-blank string `slug`. */
function hasModelSlug(model: Record<string, unknown>): model is Record<string, unknown> & {
	slug: string;
} {
	const { slug } = model;
	return typeof slug === 'string' && slug.trim().length > 0;
}
/**
 * Extracts the ChatGPT account id from an OAuth access token.
 *
 * The token is a JWT whose payload carries an `https://api.openai.com/auth`
 * claim containing `chatgpt_account_id`. Returns `undefined` for anything
 * that is not a well-formed token with that claim.
 */
function extractChatGptAccountId(token: string): string | undefined {
	const segments = token.split('.');
	if (segments.length !== 3) return undefined;
	try {
		const decoded: unknown = JSON.parse(Buffer.from(segments[1], 'base64url').toString('utf8'));
		if (typeof decoded !== 'object' || decoded === null || Array.isArray(decoded)) {
			return undefined;
		}
		const claim = (decoded as Record<string, unknown>)['https://api.openai.com/auth'];
		if (typeof claim !== 'object' || claim === null || Array.isArray(claim)) {
			return undefined;
		}
		const accountId = (claim as Record<string, unknown>).chatgpt_account_id;
		return typeof accountId === 'string' && accountId ? accountId : undefined;
	} catch {
		// Malformed base64url payload or invalid JSON — treat as "no account id".
		return undefined;
	}
}
/**
 * Lists the models available to a ChatGPT/OpenAI account via the Codex
 * backend.
 *
 * Fetches the model catalogue with the OAuth access token (adding the
 * `chatgpt-account-id` header when the token carries an account claim),
 * keeps only listable models matching the optional name filter, and orders
 * them by ascending backend priority.
 *
 * @param credentials Decrypted OAuth credential object holding the token.
 * @param filter Optional case-insensitive substring to match against slugs.
 * @throws Error when the backend responds with a non-2xx status.
 */
async function searchOpenAiAccountModels(
	credentials: ICredentialDataDecryptedObject,
	filter?: string,
): Promise<INodeListSearchResult> {
	const accessToken = getOpenAiApiKey(credentials);
	const headers: Record<string, string> = {
		Authorization: `Bearer ${accessToken}`,
		'Content-Type': 'application/json',
	};
	const accountId = extractChatGptAccountId(accessToken);
	if (accountId) headers['chatgpt-account-id'] = accountId;

	// `dispatcher` is an undici extension of fetch options, hence the cast.
	const response = await fetch(OPENAI_ACCOUNT_MODELS_URL, {
		headers,
		dispatcher: getProxyAgent(OPENAI_ACCOUNT_MODELS_URL),
	} as RequestInit);
	if (!response.ok) {
		throw new Error(`OpenAI account model discovery failed: HTTP ${response.status}`);
	}

	const payload: unknown = await response.json();
	const rawModels = isRecord(payload) && Array.isArray(payload.models) ? payload.models : [];
	const lowerFilter = filter?.toLowerCase();

	const visible = rawModels
		.filter(isRecord)
		.filter(hasModelSlug)
		// Models without an explicit visibility are treated as listable.
		.filter((model) => (model.visibility ?? 'list') === 'list')
		.filter(
			(model) => lowerFilter === undefined || model.slug.toLowerCase().includes(lowerFilter),
		);

	// Missing/non-numeric priorities sort last.
	const priorityOf = (model: Record<string, unknown>) =>
		typeof model.priority === 'number' ? model.priority : Number.MAX_SAFE_INTEGER;
	visible.sort((left, right) => priorityOf(left) - priorityOf(right));

	return {
		results: visible.map(({ slug }) => ({ name: slug, value: slug })),
	};
}
export async function searchModels(
this: ILoadOptionsFunctions,
filter?: string,
): Promise<INodeListSearchResult> {
const credentials = await this.getCredentials('openAiApi');
const authentication = this.getNodeParameter('authentication', 'apiKey');
const credentials = await this.getCredentials(getOpenAiCredentialType(authentication));
if (authentication === 'oAuth2') {
return await searchOpenAiAccountModels(credentials, filter);
}
const baseURL =
(this.getNodeParameter('options.baseURL', '') as string) ||
(credentials.url as string) ||
@ -21,7 +116,7 @@ export async function searchModels(
const openai = new OpenAI({
baseURL,
apiKey: credentials.apiKey as string,
apiKey: getOpenAiApiKey(credentials),
fetchOptions: {
dispatcher: getProxyAgent(baseURL),
},

View File

@ -10,6 +10,7 @@ import type { Mocked } from 'vitest';
import * as common from '../LMChatOpenAi/common';
import { LmChatOpenAi } from '../LMChatOpenAi/LmChatOpenAi.node';
import { OpenAiAccountChatModel } from '../LMChatOpenAi/OpenAiAccountChatModel';
vi.mock('@langchain/openai');
vi.mock('@n8n/ai-utilities');
@ -21,6 +22,15 @@ const mockedMakeN8nLlmFailedAttemptHandler = vi.mocked(makeN8nLlmFailedAttemptHa
const mockedCommon = vi.mocked(common);
const mockedGetProxyAgent = vi.mocked(getProxyAgent);
const { openAiDefaultHeaders: defaultHeaders } = Container.get(AiConfig);
const JWT_ACCOUNT_CLAIM = 'https://api.openai.com/auth';

/** Fabricates a JWT-shaped token whose payload names the given ChatGPT account. */
function makeOpenAiAccountToken(accountId: string) {
	const claimJson = JSON.stringify({ [JWT_ACCOUNT_CLAIM]: { chatgpt_account_id: accountId } });
	return `test-header.${Buffer.from(claimJson).toString('base64url')}.test-signature`;
}
describe('LmChatOpenAi', () => {
let lmChatOpenAi: LmChatOpenAi;
@ -81,6 +91,20 @@ describe('LmChatOpenAi', () => {
{
name: 'openAiApi',
required: true,
displayOptions: {
show: {
authentication: ['apiKey'],
},
},
},
{
name: 'openAiOAuth2Api',
required: true,
displayOptions: {
show: {
authentication: ['oAuth2'],
},
},
},
]);
});
@ -97,6 +121,7 @@ describe('LmChatOpenAi', () => {
// Mock getNodeParameter to handle the proper parameter names for v1.2
mockContext.getNodeParameter = vi.fn().mockImplementation((paramName: string) => {
if (paramName === 'authentication') return 'apiKey';
if (paramName === 'model.value') return 'gpt-4o-mini';
if (paramName === 'options') return {};
return undefined;
@ -235,6 +260,88 @@ describe('LmChatOpenAi', () => {
);
});
it('should create OpenAI account model with OAuth token-backed credentials', async () => {
const mockContext = setupMockContext();
mockContext.getCredentials.mockResolvedValue({
oauthTokenData: {
access_token: makeOpenAiAccountToken('account-1'),
},
});
mockContext.getNodeParameter = vi.fn().mockImplementation((paramName: string) => {
if (paramName === 'authentication') return 'oAuth2';
if (paramName === 'model.value') return 'gpt-5-mini';
if (paramName === 'options') return {};
return undefined;
});
const result = await lmChatOpenAi.supplyData.call(mockContext, 0);
expect(mockContext.getCredentials).toHaveBeenCalledWith('openAiOAuth2Api');
expect(MockedChatOpenAI).not.toHaveBeenCalled();
expect(mockedGetProxyAgent).toHaveBeenCalledWith('https://chatgpt.com/backend-api', {
headersTimeout: undefined,
bodyTimeout: undefined,
});
expect(result.response).toBeInstanceOf(OpenAiAccountChatModel);
expect(result.response).toMatchObject({ model: 'gpt-5.4-mini' });
});
it('should use the selected OpenAI account model for OAuth credentials', async () => {
const mockContext = setupMockContext();
mockContext.getCredentials.mockResolvedValue({
oauthTokenData: {
access_token: makeOpenAiAccountToken('account-1'),
},
});
mockContext.getNodeParameter = vi.fn().mockImplementation((paramName: string) => {
if (paramName === 'authentication') return 'oAuth2';
if (paramName === 'openAiAccountModel.value') return 'gpt-5.3-codex';
if (paramName === 'model.value') return 'gpt-5-mini';
if (paramName === 'options') return {};
return undefined;
});
const result = await lmChatOpenAi.supplyData.call(mockContext, 0);
expect(result.response).toBeInstanceOf(OpenAiAccountChatModel);
expect(result.response).toMatchObject({ model: 'gpt-5.3-codex' });
});
it('should pass supported OpenAI account options to the OAuth model', async () => {
const mockContext = setupMockContext();
mockContext.getCredentials.mockResolvedValue({
oauthTokenData: {
access_token: makeOpenAiAccountToken('account-1'),
},
});
mockContext.getNodeParameter = vi.fn().mockImplementation((paramName: string) => {
if (paramName === 'authentication') return 'oAuth2';
if (paramName === 'model.value') return 'gpt-5-mini';
if (paramName === 'options')
return {
maxTokens: 1000,
timeout: 45000,
reasoningEffort: 'high',
};
return undefined;
});
const result = await lmChatOpenAi.supplyData.call(mockContext, 0);
expect(result.response).toBeInstanceOf(OpenAiAccountChatModel);
expect(result.response).toMatchObject({
maxOutputTokens: 1000,
timeout: 45000,
reasoningEffort: 'high',
});
});
it('should handle custom headers from credentials', async () => {
const mockContext = setupMockContext();

View File

@ -0,0 +1,388 @@
import { HumanMessage, SystemMessage, AIMessage } from '@langchain/core/messages';
import { z } from 'zod';
import { OpenAiAccountChatModel } from '../LMChatOpenAi/OpenAiAccountChatModel';
const JWT_ACCOUNT_CLAIM = 'https://api.openai.com/auth';

/** Builds a fake three-part JWT carrying the ChatGPT account-id claim. */
function makeOpenAiAccountToken(accountId: string) {
	const body = JSON.stringify({ [JWT_ACCOUNT_CLAIM]: { chatgpt_account_id: accountId } });
	return `test-header.${Buffer.from(body).toString('base64url')}.test-signature`;
}

// Shared token used by the test cases below.
const accessToken = makeOpenAiAccountToken('account-1');
/**
 * Builds a Response that streams the given events as server-sent-event
 * frames (`data: <json>` followed by the frame separator).
 */
function sseResponse(events: Array<Record<string, unknown>>, separator = '\n\n'): Response {
	const encoder = new TextEncoder();
	const frames = events.map((event) => `data: ${JSON.stringify(event)}${separator}`);
	const body = new ReadableStream<Uint8Array>({
		start(controller) {
			frames.forEach((frame) => controller.enqueue(encoder.encode(frame)));
			controller.close();
		},
	});
	return new Response(body, {
		status: 200,
		headers: { 'content-type': 'text/event-stream' },
	});
}
describe('OpenAiAccountChatModel', () => {
const originalFetch = global.fetch;
afterEach(() => {
global.fetch = originalFetch;
jest.restoreAllMocks();
});
it('posts chat messages to the ChatGPT Codex responses backend', async () => {
const fetchMock = jest.fn().mockResolvedValue(
sseResponse([
{ type: 'response.output_text.delta', delta: 'hello' },
{
type: 'response.completed',
response: { id: 'resp_1', usage: { input_tokens: 7, output_tokens: 2 }, output: [] },
},
]),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({
accessToken,
model: 'gpt-5-mini',
timeout: 1000,
maxOutputTokens: 128,
sessionId: 'session-1',
installationId: 'install-1',
});
const result = await model.invoke([
new SystemMessage('Be short'),
new HumanMessage('Say hello'),
]);
expect(result.content).toBe('hello');
expect(fetchMock).toHaveBeenCalledWith(
'https://chatgpt.com/backend-api/codex/responses',
expect.objectContaining({
method: 'POST',
headers: expect.objectContaining({
Authorization: `Bearer ${accessToken}`,
'chatgpt-account-id': 'account-1',
'OpenAI-Beta': 'responses=experimental',
originator: 'codex_cli_rs',
session_id: 'session-1',
'x-client-request-id': 'session-1',
'x-codex-window-id': 'session-1:0',
'x-codex-installation-id': 'install-1',
}),
}),
);
const request = fetchMock.mock.calls[0]?.[1] as { body?: string };
expect(JSON.parse(request.body ?? '{}')).toMatchObject({
model: 'gpt-5-mini',
store: false,
stream: true,
max_output_tokens: 128,
instructions: 'Be short',
input: [{ role: 'user', content: [{ type: 'input_text', text: 'Say hello' }] }],
client_metadata: {
'x-codex-installation-id': 'install-1',
'x-codex-window-id': 'session-1:0',
},
tool_choice: 'auto',
});
});
it('parses CRLF-framed SSE responses', async () => {
const fetchMock = jest.fn().mockResolvedValue(
sseResponse(
[
{ type: 'response.output_text.delta', delta: 'hello' },
{
type: 'response.completed',
response: { id: 'resp_1', usage: { input_tokens: 7, output_tokens: 2 }, output: [] },
},
],
'\r\n\r\n',
),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' });
const result = await model.invoke([new HumanMessage('Say hello')]);
expect(result.content).toBe('hello');
});
it('maps bound LangChain tools to Codex function tools and returns tool calls', async () => {
const fetchMock = jest.fn().mockResolvedValue(
sseResponse([
{
type: 'response.output_item.added',
item: {
type: 'function_call',
call_id: 'call_1',
name: 'lookup_weather',
arguments: '{}',
},
},
{
type: 'response.function_call_arguments.done',
call_id: 'call_1',
arguments: '{"city":"Paris"}',
},
{
type: 'response.completed',
response: { id: 'resp_2', usage: { input_tokens: 8, output_tokens: 3 }, output: [] },
},
]),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' }).bindTools([
{
name: 'lookup_weather',
description: 'Look up weather',
schema: z.object({
city: z.string().optional(),
}),
},
]);
const result = await model.invoke([new HumanMessage('weather in Paris')]);
expect(result).toBeInstanceOf(AIMessage);
expect(result.tool_calls).toEqual([
{
id: 'call_1',
name: 'lookup_weather',
args: { city: 'Paris' },
},
]);
const request = fetchMock.mock.calls[0]?.[1] as { body?: string };
expect(JSON.parse(request.body ?? '{}')).toMatchObject({
tools: [
{
type: 'function',
name: 'lookup_weather',
parameters: {
type: 'object',
properties: {
city: { type: ['string', 'null'] },
},
required: ['city'],
additionalProperties: false,
},
strict: true,
},
],
tool_choice: 'auto',
parallel_tool_calls: true,
});
});
it('sends strict object parameters for tools without explicit properties', async () => {
const fetchMock = jest.fn().mockResolvedValue(
sseResponse([
{
type: 'response.completed',
response: { id: 'resp_3', usage: { input_tokens: 4, output_tokens: 1 }, output: [] },
},
]),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' }).bindTools([
{
name: 'Code_Tool',
description: 'Run simple code',
schema: {
type: 'object',
},
},
]);
await model.invoke([new HumanMessage('uppercase query')]);
const request = fetchMock.mock.calls[0]?.[1] as { body?: string };
expect(JSON.parse(request.body ?? '{}')).toMatchObject({
tools: [
{
type: 'function',
name: 'Code_Tool',
parameters: {
type: 'object',
properties: {},
required: [],
additionalProperties: false,
},
strict: true,
},
],
});
});
it('preserves simple LangChain tool input schemas', async () => {
const fetchMock = jest.fn().mockResolvedValue(
sseResponse([
{
type: 'response.completed',
response: { id: 'resp_4', usage: { input_tokens: 4, output_tokens: 1 }, output: [] },
},
]),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' }).bindTools([
{
name: 'Code_Tool',
description: 'Run simple code',
schema: z.object({ input: z.string().optional() }).transform((value) => value.input),
},
]);
await model.invoke([new HumanMessage('uppercase query')]);
const request = fetchMock.mock.calls[0]?.[1] as { body?: string };
expect(JSON.parse(request.body ?? '{}')).toMatchObject({
tools: [
{
type: 'function',
name: 'Code_Tool',
parameters: {
type: 'object',
properties: {
input: { type: ['string', 'null'] },
},
required: ['input'],
additionalProperties: false,
},
strict: true,
},
],
});
});
it('uses previous_response_id for incremental follow-up prompts', async () => {
const fetchMock = jest
.fn()
.mockResolvedValueOnce(
sseResponse([
{
type: 'response.completed',
response: { id: 'resp_1', usage: { input_tokens: 3, output_tokens: 1 }, output: [] },
},
]),
)
.mockResolvedValueOnce(
sseResponse([
{ type: 'response.output_text.delta', delta: 'next' },
{
type: 'response.completed',
response: { id: 'resp_2', usage: { input_tokens: 2, output_tokens: 1 }, output: [] },
},
]),
);
global.fetch = fetchMock;
const model = new OpenAiAccountChatModel({
accessToken,
model: 'gpt-5-mini',
sessionId: 'session-1',
installationId: 'install-1',
});
await model.invoke([new SystemMessage('Be short'), new HumanMessage('first')]);
await model.invoke([
new SystemMessage('Be short'),
new HumanMessage('first'),
new AIMessage(''),
new HumanMessage('second'),
]);
const secondRequest = fetchMock.mock.calls[1]?.[1] as { body?: string };
expect(JSON.parse(secondRequest.body ?? '{}')).toMatchObject({
previous_response_id: 'resp_1',
input: [{ role: 'user', content: [{ type: 'input_text', text: 'second' }] }],
});
});
it('streams tool argument deltas for LangChain streaming agents', async () => {
	// SSE stream: a function_call item is announced, then its JSON arguments
	// arrive in two delta chunks, then the response completes.
	const fetchMock = jest.fn().mockResolvedValue(
		sseResponse([
			{
				type: 'response.output_item.added',
				item: { type: 'function_call', call_id: 'call_1', name: 'lookup_weather', arguments: '' },
			},
			{
				type: 'response.function_call_arguments.delta',
				call_id: 'call_1',
				delta: '{"city"',
			},
			{
				type: 'response.function_call_arguments.delta',
				call_id: 'call_1',
				delta: ':"Paris"}',
			},
			{
				type: 'response.completed',
				response: { id: 'resp_3', usage: { input_tokens: 6, output_tokens: 4 }, output: [] },
			},
		]),
	);
	global.fetch = fetchMock;
	const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' });
	const chunks = [];
	for await (const chunk of await model.stream([new HumanMessage('weather in Paris')])) {
		chunks.push(chunk);
	}
	// Raw (partial) argument text must be surfaced as tool_call_chunks while streaming…
	expect(chunks.some((chunk) => chunk.tool_call_chunks?.[0]?.args === '{"city"')).toBe(true);
	// …and the fully assembled arguments must parse into a structured tool call.
	expect(chunks.some((chunk) => chunk.tool_calls?.[0]?.args.city === 'Paris')).toBe(true);
});
it('preserves provider finish metadata while streaming', async () => {
	// The provider reports an 'incomplete' response truncated by max_output_tokens;
	// the model should translate that into finishReason 'length' and keep the
	// output item's extra fields and token usage on the final chunk.
	const fetchMock = jest.fn().mockResolvedValue(
		sseResponse([
			{ type: 'response.output_text.delta', delta: 'partial' },
			{
				type: 'response.completed',
				response: {
					id: 'resp_4',
					status: 'incomplete',
					incomplete_details: { reason: 'max_output_tokens' },
					usage: { input_tokens: 6, output_tokens: 4 },
					output: [{ type: 'message', phase: 'final_answer' }],
				},
			},
		]),
	);
	global.fetch = fetchMock;
	const model = new OpenAiAccountChatModel({ accessToken, model: 'gpt-5-mini' });
	const chunks = [];
	for await (const chunk of await model.stream([new HumanMessage('write a long answer')])) {
		chunks.push(chunk);
	}
	const finishChunk = chunks.find((chunk) => chunk.response_metadata?.finishReason === 'length');
	// Provider-specific fields (phase) survive on additional_kwargs…
	expect(finishChunk?.additional_kwargs).toMatchObject({ phase: 'final_answer' });
	// …and usage is totalled (6 in + 4 out = 10).
	expect(finishChunk?.usage_metadata).toMatchObject({
		input_tokens: 6,
		output_tokens: 4,
		total_tokens: 10,
	});
});
});

View File

@ -231,6 +231,19 @@ describe('LoadNodesAndCredentials', () => {
expect(result).toBe(true);
});
it('should return false when credential skips HTTP Request domain restrictions', () => {
	const credential = {
		name: 'testCredential',
		displayName: 'Test Credential',
		extends: ['oAuth2Api'],
		// Opt-out flag consulted by shouldAddDomainRestrictions: credentials that
		// set it must not inherit the HTTP Request domain-restriction fields.
		__skipHttpRequestDomainRestrictions: true,
		properties: [],
	};
	const result = (instance as any).shouldAddDomainRestrictions(credential);
	expect(result).toBe(false);
});
it('should return true for credentials extending oAuth1Api', () => {
const credential = {
name: 'testCredential',

View File

@ -49,5 +49,11 @@ describe('AuthService Browser ID Whitelist', () => {
expect(skipEndpoints).toContain('/rest/oauth1-credential/callback');
expect(skipEndpoints).toContain('/rest/oauth2-credential/callback');
});
it('should include OpenAI device auth page in the skip browser ID check endpoints', () => {
	// The device-auth page is opened directly in a browser tab, so it cannot
	// carry the custom browser-id header and must be whitelisted.
	const skipEndpoints = (authService as any).skipBrowserIdCheckEndpoints;
	expect(skipEndpoints).toContain('/rest/openai-oauth2-credential/device-auth');
});
});
});

View File

@ -83,6 +83,7 @@ export class AuthService {
// oAuth callback urls aren't called by the frontend. therefore we can't send custom header on these requests
`/${restEndpoint}/oauth1-credential/callback`,
`/${restEndpoint}/oauth2-credential/callback`,
`/${restEndpoint}/openai-oauth2-credential/device-auth`,
// Skip browser ID check for type files
'/types/nodes.json',

View File

@ -75,6 +75,27 @@ describe('OAuth2CredentialController', () => {
userId: '123',
});
});
it('should return the OpenAI device auth page URL for OpenAI OAuth2 credentials', async () => {
	const mockResolvedCredential = mock<CredentialsEntity>({
		id: 'openai-credential-id',
		type: 'openAiOAuth2Api',
	});
	oauthService.getCredential.mockResolvedValueOnce(mockResolvedCredential);
	oauthService.getBaseUrl.mockReturnValue('http://localhost:5678/rest/oauth2-credential');
	const req = mock<OAuthRequest.OAuth2Credential.Auth>({
		user: mock<User>({ id: '123' }),
		query: { id: 'openai-credential-id' },
	});
	const authUri = await controller.getAuthUri(req);
	// OpenAI OAuth2 credentials bypass the generic authorize-URI generation and
	// are sent to the device-code login page instead.
	expect(authUri).toBe(
		'http://localhost:5678/rest/openai-oauth2-credential/device-auth?id=openai-credential-id',
	);
	expect(oauthService.generateAOauth2AuthUri).not.toHaveBeenCalled();
});
});
describe('handleCallback', () => {

View File

@ -0,0 +1,136 @@
import type { AuthenticatedRequest, CredentialsEntity, User } from '@n8n/db';
import { mock } from 'jest-mock-extended';
import type { Response } from 'express';
import type { CredentialsFinderService } from '@/credentials/credentials-finder.service';
import { OpenAiOAuth2DeviceController } from '@/controllers/oauth/openai-oauth2-device.controller';
import type { OauthService } from '@/oauth/oauth.service';
describe('OpenAiOAuth2DeviceController', () => {
	const credentialsFinderService = mock<CredentialsFinderService>();
	const oauthService = mock<OauthService>();
	const controller = new OpenAiOAuth2DeviceController(credentialsFinderService, oauthService);
	// global.fetch is stubbed per test; keep the real one to restore afterwards.
	const originalFetch = global.fetch;
	beforeEach(() => {
		jest.clearAllMocks();
		// Every test operates on an updatable OpenAI OAuth2 credential.
		credentialsFinderService.findCredentialForUser.mockResolvedValue({
			id: 'credential-id',
			type: 'openAiOAuth2Api',
		} as CredentialsEntity);
	});
	afterEach(() => {
		global.fetch = originalFetch;
	});
	describe('renderDeviceAuthPage', () => {
		it('renders a device login page that sends the browser ID header while polling', async () => {
			// Stub OpenAI's device-authorization endpoint response.
			global.fetch = jest.fn().mockResolvedValue(
				new Response(
					JSON.stringify({
						device_auth_id: 'device-auth-id',
						user_code: 'USER-CODE',
						interval: '5',
						expires_at: '2026-04-27T11:30:29.549956+00:00',
					}),
					{ status: 200, headers: { 'Content-Type': 'application/json' } },
				),
			);
			const req = mock<AuthenticatedRequest<{}, {}, {}, { id?: string }>>({
				query: { id: 'credential-id' },
				user: mock<User>(),
			});
			const res = mock<Response>();
			res.type.mockReturnValue(res);
			res.send.mockReturnValue(res);
			await controller.renderDeviceAuthPage(req, res);
			// The assertions pin exact fragments of the rendered HTML/inline script.
			const html = res.send.mock.calls[0][0] as string;
			expect(html).toContain("'browser-id': getBrowserId()");
			expect(html).toContain("credentials: 'same-origin'");
			// expires_at converted to epoch milliseconds for the inline script.
			expect(html).toContain('"expiresAt":1777289429549');
			expect(html).toContain('Continue after authorization');
			expect(html).toContain('class="primary-action"');
			expect(html).toContain('Open OpenAI device login');
			expect(html).toContain('background: #ff6d5a');
			expect(html).toContain('const payload = result.data ?? result');
			expect(html).toContain("verificationLink.addEventListener('click'");
			expect(html).toContain("continueButton.addEventListener('click'");
			// Guards against a previous auto-polling implementation resurfacing.
			expect(html).not.toContain('setTimeout(poll, challenge.intervalMs);\\n\\t\\t\\t}');
		});
		it('escapes challenge data before interpolating it into the inline script', async () => {
			global.fetch = jest.fn().mockResolvedValue(
				new Response(
					JSON.stringify({
						device_auth_id: 'device-auth-id</script>',
						user_code: 'USER-CODE',
						interval: '5',
						expires_at: '2026-04-27T11:30:29.549956+00:00',
					}),
					{ status: 200, headers: { 'Content-Type': 'application/json' } },
				),
			);
			// Credential id and device id both carry script-breaking payloads.
			const req = mock<AuthenticatedRequest<{}, {}, {}, { id?: string }>>({
				query: { id: 'credential-id</script><script>alert(1)</script>' },
				user: mock<User>(),
			});
			const res = mock<Response>();
			res.type.mockReturnValue(res);
			res.send.mockReturnValue(res);
			await controller.renderDeviceAuthPage(req, res);
			const html = res.send.mock.calls[0][0] as string;
			// Angle brackets must be emitted as \u003c/\u003e escapes, never raw.
			expect(html).toContain('credential-id\\u003c/script\\u003e');
			expect(html).toContain('device-auth-id\\u003c/script\\u003e');
			expect(html).not.toContain('credential-id</script><script>alert(1)</script>');
			expect(html).not.toContain('device-auth-id</script>');
		});
	});
	describe('completeDeviceAuth', () => {
		it('reads the device completion payload from the request body', async () => {
			// First fetch: device token endpoint; second fetch: OAuth token exchange.
			global.fetch = jest
				.fn()
				.mockResolvedValueOnce(
					new Response(
						JSON.stringify({
							authorization_code: 'authorization-code',
							code_verifier: 'code-verifier',
						}),
						{ status: 200, headers: { 'Content-Type': 'application/json' } },
					),
				)
				.mockResolvedValueOnce(
					new Response(JSON.stringify({ access_token: 'access-token' }), {
						status: 200,
						headers: { 'Content-Type': 'application/json' },
					}),
				);
			const req = mock<AuthenticatedRequest>({
				user: mock<User>(),
			});
			const res = mock<Response>();
			const payload = {
				id: 'credential-id',
				deviceAuthId: 'device-auth-id',
				userCode: 'USER-CODE',
			};
			await expect(controller.completeDeviceAuth(req, res, payload)).resolves.toEqual({
				status: 'success',
			});
			// Token data is persisted; flow-internal secrets are stripped.
			expect(oauthService.encryptAndSaveData).toHaveBeenCalledWith(
				expect.objectContaining({ id: 'credential-id' }),
				{ oauthTokenData: { access_token: 'access-token' } },
				['csrfSecret', 'codeVerifier'],
			);
		});
	});
});

View File

@ -14,6 +14,8 @@ import { OAuthJweServiceProxy } from '@/oauth/oauth-jwe-service.proxy';
import { OauthService, OauthVersion, skipAuthOnOAuthCallback } from '@/oauth/oauth.service';
import { OAuthRequest } from '@/requests';
const OPENAI_OAUTH2_CREDENTIAL_TYPE = 'openAiOAuth2Api';
@RestController('/oauth2-credential')
export class OAuth2CredentialController {
constructor(
@ -28,6 +30,10 @@ export class OAuth2CredentialController {
async getAuthUri(req: OAuthRequest.OAuth2Credential.Auth): Promise<string> {
const credential = await this.oauthService.getCredential(req);
if (credential.type === OPENAI_OAUTH2_CREDENTIAL_TYPE) {
return this.getOpenAiDeviceAuthUrl(credential.id);
}
const uri = await this.oauthService.generateAOauth2AuthUri(credential, {
cid: credential.id,
origin: 'static-credential',
@ -36,6 +42,14 @@ export class OAuth2CredentialController {
return uri;
}
/**
 * Builds the URL of the OpenAI device-code login page served by
 * OpenAiOAuth2DeviceController. Derived from the OAuth2 base URL with its
 * trailing `/oauth2-credential` segment stripped off.
 */
private getOpenAiDeviceAuthUrl(credentialId: string): string {
	const restUrl = this.oauthService
		.getBaseUrl(OauthVersion.V2)
		.replace(/\/oauth2-credential$/, '');
	return `${restUrl}/openai-oauth2-credential/device-auth?id=${encodeURIComponent(credentialId)}`;
}
/** Verify and store app code. Generate access tokens and store for respective credential */
@Get('/callback', { usesTemplates: true, skipAuth: skipAuthOnOAuthCallback })
async handleCallback(req: OAuthRequest.OAuth2Credential.Callback, res: Response) {

View File

@ -0,0 +1,457 @@
import { Z } from '@n8n/api-types';
import { Body, Get, Post, RestController } from '@n8n/decorators';
import type { AuthenticatedRequest } from '@n8n/db';
import type { Response as ExpressResponse } from 'express';
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';
import { z } from 'zod';
import { CredentialsFinderService } from '@/credentials/credentials-finder.service';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { OauthService } from '@/oauth/oauth.service';
const OPENAI_OAUTH2_CREDENTIAL_TYPE = 'openAiOAuth2Api';
// Public OpenAI "Codex" OAuth client id; also hard-coded in
// OpenAiOAuth2Api.credentials — keep the two in sync.
const OPENAI_CODEX_CLIENT_ID = 'app_EMoamEEZ73f0CkXaXp7hrann';
// OpenAI device-code flow endpoints.
const OPENAI_DEVICE_AUTHORIZATION_ENDPOINT =
	'https://auth.openai.com/api/accounts/deviceauth/usercode';
const OPENAI_DEVICE_TOKEN_ENDPOINT = 'https://auth.openai.com/api/accounts/deviceauth/token';
const OPENAI_TOKEN_ENDPOINT = 'https://auth.openai.com/oauth/token';
const OPENAI_DEVICE_REDIRECT_URI = 'https://auth.openai.com/deviceauth/callback';
// Page the user opens in a second tab to enter the user code.
const OPENAI_DEVICE_VERIFICATION_URI = 'https://auth.openai.com/codex/device';
// Shape of the device-authorization (user-code) response.
// NOTE: `interval` arrives as a string of seconds — see requestDeviceChallenge.
const deviceAuthorizationResponseSchema = z.object({
	device_auth_id: z.string().min(1),
	user_code: z.string().min(1),
	interval: z.string().optional(),
	expires_in: z.number().optional(),
	expires_at: z.string().optional(),
});
// Response of the device token endpoint once the user has approved.
const deviceTokenResponseSchema = z.object({
	authorization_code: z.string().min(1),
	code_verifier: z.string().min(1),
});
// Standard OAuth token response; optional fields are dropped before persisting.
const tokenResponseSchema = z.object({
	access_token: z.string().min(1),
	refresh_token: z.string().optional(),
	id_token: z.string().optional(),
	expires_in: z.number().optional(),
	token_type: z.string().optional(),
});
// Body of POST /device-complete, sent by the device page's polling script.
class OpenAiDeviceCompleteDto extends Z.class({
	id: z.string().min(1),
	deviceAuthId: z.string().min(1),
	userCode: z.string().min(1),
}) {}
// Normalized challenge handed to the rendered page (times in epoch ms).
type OpenAiDeviceChallenge = {
	deviceAuthId: string;
	userCode: string;
	verificationUri: string;
	intervalMs: number;
	expiresAt: number;
};
type OpenAiDeviceAuthRequest = AuthenticatedRequest<{}, {}, {}, { id?: string }>;
/**
 * Implements OpenAI's device-code login for the `openAiOAuth2Api` credential
 * type. Instead of the redirect-based OAuth2 flow, this controller renders a
 * self-contained HTML page that shows a user code, links to OpenAI's device
 * login, and polls `POST /device-complete` until the authorization finishes,
 * at which point tokens are exchanged and saved on the credential.
 */
@RestController('/openai-oauth2-credential')
export class OpenAiOAuth2DeviceController {
	constructor(
		private readonly credentialsFinderService: CredentialsFinderService,
		private readonly oauthService: OauthService,
	) {}

	/**
	 * GET /device-auth?id=<credentialId>
	 * Verifies the caller may update the credential, requests a fresh device
	 * challenge from OpenAI, and renders the polling page for it.
	 */
	@Get('/device-auth')
	async renderDeviceAuthPage(req: OpenAiDeviceAuthRequest, res: ExpressResponse) {
		const credentialId = String(req.query.id ?? '');
		// Access check only; the resolved credential itself is not needed here.
		await this.getOpenAiCredentialForUpdate(req, credentialId);
		const challenge = await this.requestDeviceChallenge();
		return res.type('html').send(this.renderDevicePage(credentialId, challenge));
	}

	/**
	 * POST /device-complete — polled by the device page's inline script.
	 * Returns `{ status: 'pending' }` while the authorization is not finished
	 * and `{ status: 'success' }` once tokens were exchanged and persisted.
	 * Throws BadRequestError on any other OpenAI error response.
	 */
	@Post('/device-complete')
	async completeDeviceAuth(
		req: AuthenticatedRequest,
		_res: ExpressResponse,
		@Body payload: OpenAiDeviceCompleteDto,
	) {
		const { id, deviceAuthId, userCode } = payload;
		const credential = await this.getOpenAiCredentialForUpdate(req, id);
		const deviceTokenResponse = await fetch(OPENAI_DEVICE_TOKEN_ENDPOINT, {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({
				device_auth_id: deviceAuthId,
				user_code: userCode,
			}),
		});
		// 403/404 are treated as "authorization not completed yet", not as errors.
		if (deviceTokenResponse.status === 403 || deviceTokenResponse.status === 404) {
			return { status: 'pending' };
		}
		if (!deviceTokenResponse.ok) {
			throw new BadRequestError(
				await this.getOpenAiErrorMessage(deviceTokenResponse, 'OpenAI device login failed'),
			);
		}
		const deviceTokenPayload: unknown = await deviceTokenResponse.json();
		const deviceToken = deviceTokenResponseSchema.parse(deviceTokenPayload);
		const tokenData = await this.exchangeDeviceAuthorizationCode(deviceToken);
		// Persist the tokens and strip flow-internal secrets from the credential.
		await this.oauthService.encryptAndSaveData(credential, { oauthTokenData: tokenData }, [
			'csrfSecret',
			'codeVerifier',
		]);
		return { status: 'success' };
	}

	/**
	 * Loads the credential and ensures it exists, the caller holds
	 * `credential:update` on it, and it is the OpenAI OAuth2 credential type.
	 */
	private async getOpenAiCredentialForUpdate(req: AuthenticatedRequest, credentialId: string) {
		if (!credentialId) {
			throw new BadRequestError('Required credential ID is missing');
		}
		const credential = await this.credentialsFinderService.findCredentialForUser(
			credentialId,
			req.user,
			['credential:update'],
		);
		if (!credential) {
			throw new NotFoundError('Credential not found');
		}
		if (credential.type !== OPENAI_OAUTH2_CREDENTIAL_TYPE) {
			throw new BadRequestError('Credential type not supported');
		}
		return credential;
	}

	/**
	 * Requests a device-authorization challenge (device id + user code) from
	 * OpenAI and normalizes interval/expiry into epoch milliseconds, with safe
	 * fallbacks (minimum 1s / default 5s poll interval, default 10min expiry).
	 */
	private async requestDeviceChallenge(): Promise<OpenAiDeviceChallenge> {
		const response = await fetch(OPENAI_DEVICE_AUTHORIZATION_ENDPOINT, {
			method: 'POST',
			headers: { 'Content-Type': 'application/json' },
			body: JSON.stringify({ client_id: OPENAI_CODEX_CLIENT_ID }),
		});
		if (!response.ok) {
			throw new BadRequestError(
				await this.getOpenAiErrorMessage(response, 'OpenAI device login failed'),
			);
		}
		const payload: unknown = await response.json();
		const device = deviceAuthorizationResponseSchema.parse(payload);
		// `interval` arrives as a string of seconds.
		const intervalSeconds = Number.parseInt(device.interval ?? '5', 10);
		const expiresAt = device.expires_at
			? Date.parse(device.expires_at)
			: Date.now() + (device.expires_in ?? 600) * 1000;
		return {
			verificationUri: OPENAI_DEVICE_VERIFICATION_URI,
			userCode: device.user_code,
			deviceAuthId: device.device_auth_id,
			intervalMs: Math.max(Number.isFinite(intervalSeconds) ? intervalSeconds : 5, 1) * 1000,
			expiresAt: Number.isFinite(expiresAt) ? expiresAt : Date.now() + 600 * 1000,
		};
	}

	/**
	 * Exchanges the device authorization code (plus its PKCE code verifier) for
	 * tokens at the OAuth token endpoint, dropping undefined fields so only
	 * values actually returned by OpenAI are persisted.
	 */
	private async exchangeDeviceAuthorizationCode(deviceToken: {
		authorization_code: string;
		code_verifier: string;
	}): Promise<ICredentialDataDecryptedObject> {
		const body = new URLSearchParams();
		body.set('grant_type', 'authorization_code');
		body.set('code', deviceToken.authorization_code);
		body.set('redirect_uri', OPENAI_DEVICE_REDIRECT_URI);
		body.set('client_id', OPENAI_CODEX_CLIENT_ID);
		body.set('code_verifier', deviceToken.code_verifier);
		const tokenResponse = await fetch(OPENAI_TOKEN_ENDPOINT, {
			method: 'POST',
			headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
			body: body.toString(),
		});
		if (!tokenResponse.ok) {
			throw new BadRequestError(
				await this.getOpenAiErrorMessage(tokenResponse, 'OpenAI token exchange failed'),
			);
		}
		const tokenPayload: unknown = await tokenResponse.json();
		const tokenResponseData = tokenResponseSchema.parse(tokenPayload);
		const tokenData: ICredentialDataDecryptedObject = {};
		for (const [key, value] of Object.entries(tokenResponseData)) {
			if (value !== undefined) {
				tokenData[key] = value;
			}
		}
		return tokenData;
	}

	/**
	 * Extracts a human-readable message from an OpenAI error response, falling
	 * back to `<fallback>: HTTP <status>` when the body is not JSON or carries
	 * none of the known message fields.
	 */
	private async getOpenAiErrorMessage(response: Response, fallback: string) {
		try {
			const payload: unknown = await response.json();
			const parsed = z
				.object({
					error: z.string().optional(),
					error_description: z.string().optional(),
					message: z.string().optional(),
				})
				.safeParse(payload);
			if (parsed.success) {
				return (
					parsed.data.error_description ??
					parsed.data.message ??
					parsed.data.error ??
					`${fallback}: HTTP ${response.status}`
				);
			}
		} catch {}
		return `${fallback}: HTTP ${response.status}`;
	}

	/**
	 * Renders the self-contained device-login page. The challenge is serialized
	 * with stringifyForInlineScript so it cannot break out of the inline
	 * <script>. The page's script polls ./device-complete (with the n8n
	 * browser-id header, same-origin cookies) and reports the outcome on the
	 * 'oauth-callback' BroadcastChannel. Polling starts only after the user
	 * clicks the login link or the Continue button — never automatically.
	 *
	 * NOTE(review): on a failed poll the script closes the BroadcastChannel but
	 * re-enables the Continue button; a subsequent successful poll would then
	 * postMessage on a closed channel and be reported as an error — confirm
	 * this is intended.
	 */
	private renderDevicePage(credentialId: string, challenge: OpenAiDeviceChallenge) {
		const challengeJson = stringifyForInlineScript({ credentialId, ...challenge });
		return `<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>OpenAI OAuth2 Device Login</title>
<style>
body {
box-sizing: border-box;
display: flex;
min-height: 100vh;
margin: 0;
padding: 32px;
align-items: center;
justify-content: center;
background: #f6f7f8;
color: #1d1f21;
font-family: Inter, Arial, sans-serif;
}
main {
width: min(480px, 100%);
background: #fff;
border: 1px solid #d9dee3;
border-radius: 8px;
padding: 28px;
}
h1 {
margin: 0 0 16px;
font-size: 20px;
font-weight: 600;
}
p {
margin: 12px 0;
line-height: 1.5;
}
.code {
margin: 20px 0;
padding: 18px;
border: 1px solid #c9d1d9;
border-radius: 6px;
background: #f3f5f7;
font-family: ui-monospace, SFMono-Regular, Menlo, Consolas, monospace;
font-size: 28px;
font-weight: 700;
text-align: center;
letter-spacing: 2px;
}
.primary-action {
display: flex;
margin: 20px 0 8px;
}
.primary-action a {
display: inline-flex;
width: 100%;
min-height: 44px;
align-items: center;
justify-content: center;
border-radius: 6px;
background: #ff6d5a;
color: #fff;
font-size: 15px;
font-weight: 700;
text-decoration: none;
}
.primary-action a:focus {
outline: 2px solid #ff9a8d;
outline-offset: 2px;
}
button {
display: inline-flex;
margin-top: 8px;
padding: 6px 0;
border: 0;
border-radius: 4px;
background: transparent;
color: #5f6b7a;
font-size: 13px;
font-weight: 500;
text-decoration: underline;
cursor: pointer;
}
button:disabled {
cursor: default;
opacity: 0.65;
}
.status {
margin-top: 18px;
color: #5f6b7a;
font-size: 14px;
}
</style>
</head>
<body>
<main>
<h1>Connect OpenAI Account (ChatGPT)</h1>
<p>Open the OpenAI device login page and enter this code:</p>
<div class="code" id="user-code"></div>
<div class="primary-action">
<a id="verification-link" href="#" target="_blank" rel="noopener">Open OpenAI device login</a>
</div>
<button id="continue-button" type="button">Continue after authorization</button>
<p class="status" id="status">Waiting for you to finish authorization in OpenAI.</p>
</main>
<script>
const challenge = ${challengeJson};
const userCode = document.getElementById('user-code');
const verificationLink = document.getElementById('verification-link');
const continueButton = document.getElementById('continue-button');
const status = document.getElementById('status');
userCode.textContent = challenge.userCode;
verificationLink.href = challenge.verificationUri;
const channel = new BroadcastChannel('oauth-callback');
let pollTimeout;
function getBrowserId() {
const storageKey = 'n8n-browserId';
let browserId = localStorage.getItem(storageKey);
if (!browserId) {
browserId = crypto.randomUUID();
localStorage.setItem(storageKey, browserId);
}
return browserId;
}
function schedulePoll(delayMs) {
clearTimeout(pollTimeout);
pollTimeout = setTimeout(poll, delayMs);
}
async function poll() {
if (Date.now() > challenge.expiresAt) {
status.textContent = 'The device code expired. Close this window and reconnect.';
channel.postMessage('error');
channel.close();
return;
}
try {
const response = await fetch('./device-complete', {
method: 'POST',
credentials: 'same-origin',
headers: {
'Content-Type': 'application/json',
'browser-id': getBrowserId(),
},
body: JSON.stringify({
id: challenge.credentialId,
deviceAuthId: challenge.deviceAuthId,
userCode: challenge.userCode,
}),
});
const responseText = await response.text();
let result = {};
try {
result = responseText ? JSON.parse(responseText) : {};
} catch (error) {
status.textContent = 'Connection failed: HTTP ' + response.status + ' ' + responseText;
channel.postMessage('error');
channel.close();
continueButton.disabled = false;
return;
}
const payload = result.data ?? result;
if (response.ok && payload.status === 'success') {
status.textContent = 'Connection successful. This window will close automatically.';
channel.postMessage('success');
channel.close();
setTimeout(() => window.close(), 1500);
return;
}
if (response.ok && payload.status === 'pending') {
status.textContent = 'Waiting for OpenAI authorization to complete...';
schedulePoll(challenge.intervalMs);
return;
}
status.textContent =
payload.message ??
payload.error?.message ??
'Connection failed: HTTP ' + response.status + ' ' + responseText;
channel.postMessage('error');
channel.close();
continueButton.disabled = false;
} catch (error) {
status.textContent =
'Connection failed: ' + (error instanceof Error ? error.message : String(error));
channel.postMessage('error');
channel.close();
continueButton.disabled = false;
}
}
verificationLink.addEventListener('click', () => {
continueButton.disabled = true;
status.textContent = 'Waiting for OpenAI authorization to complete...';
schedulePoll(challenge.intervalMs);
});
continueButton.addEventListener('click', () => {
continueButton.disabled = true;
status.textContent = 'Checking authorization...';
void poll();
});
</script>
</body>
</html>`;
	}
}
/**
 * Serializes a value as JSON that is safe to embed inside an inline
 * `<script>` element: `<`, `>`, `&` and the JS line separators U+2028/U+2029
 * are rewritten as `\uXXXX` escape sequences so attacker-controlled data can
 * never terminate the surrounding script tag or break the script source.
 */
function stringifyForInlineScript(value: unknown): string {
	const escapes: Record<string, string> = {
		'<': '\\u003c',
		'>': '\\u003e',
		'&': '\\u0026',
		'\u2028': '\\u2028',
		'\u2029': '\\u2029',
	};
	return JSON.stringify(value).replace(
		/[<>&\u2028\u2029]/g,
		(character) => escapes[character] ?? character,
	);
}

View File

@ -732,6 +732,10 @@ export class LoadNodesAndCredentials {
// Handle both credential types by extracting the actual ICredentialType
const credentialType = 'type' in credential ? credential.type : credential;
if (credentialType.__skipHttpRequestDomainRestrictions === true) {
return false;
}
return (
credentialType.authenticate !== undefined ||
credentialType.genericAuth === true ||

View File

@ -41,6 +41,7 @@ import '@/controllers/me.controller';
import '@/controllers/node-types.controller';
import '@/controllers/oauth/oauth1-credential.controller';
import '@/controllers/oauth/oauth2-credential.controller';
import '@/controllers/oauth/openai-oauth2-device.controller';
import '@/controllers/orchestration.controller';
import '@/controllers/owner.controller';
import '@/controllers/password-reset.controller';

View File

@ -431,6 +431,39 @@ describe('CredentialConfig', () => {
expect(screen.queryByTestId('copy-input')).not.toBeInTheDocument();
});
it('should not show redirect URL when the credential hides the OAuth callback URL', () => {
renderComponent({
pinia: createTestingPinia({
initialState: {
[STORES.SETTINGS]: {
settings: { enterprise: { sharing: false, externalSecrets: false } },
},
[STORES.ROOT]: {
oauthCallbackUrls: { oauth2: 'https://example.com/callback' },
},
},
}),
props: {
isManaged: false,
mode: 'new',
credentialType: {
...mockCredentialType,
name: 'openAiOAuth2Api',
extends: ['oAuth2Api'],
__hideOAuthRedirectUrl: true,
},
credentialProperties: [],
credentialData: {} as ICredentialDataDecryptedObject,
isOAuthType: true,
managedOauthAvailable: false,
useCustomOauth: false,
credentialPermissions: writePermissions,
},
});
expect(screen.queryByTestId('copy-input')).not.toBeInTheDocument();
});
it('should not show redirect URL for non-OAuth credentials', () => {
renderComponent({
props: {

View File

@ -233,6 +233,10 @@ const isManagedOAuth = computed(
() => props.isOAuthType && props.managedOauthAvailable && !props.useCustomOauth,
);
const shouldShowOAuthRedirectUrl = computed(
() => props.isOAuthType && !isManagedOAuth.value && !props.credentialType.__hideOAuthRedirectUrl,
);
function onDataChange(event: IUpdateInformation): void {
emit('update', event);
}
@ -439,7 +443,7 @@ watch(showOAuthSuccessBanner, (newValue, oldValue) => {
</div>
<CopyInput
v-if="isOAuthType && !isManagedOAuth"
v-if="shouldShowOAuthRedirectUrl"
:label="i18n.baseText('credentialEdit.credentialConfig.oAuthRedirectUrl')"
:value="oAuthCallbackUrl"
:copy-button-text="i18n.baseText('credentialEdit.credentialConfig.clickToCopy')"

View File

@ -282,6 +282,94 @@ describe('CredentialEdit', () => {
await retry(() => expect(queryByTestId('credential-save-button')).toBeInTheDocument());
});
test('does not inherit HTTP Request domain restriction fields when credential skips them', async () => {
const pinia = createTestingPinia({
initialState: {
[STORES.UI]: {
modalsById: {
[CREDENTIAL_EDIT_MODAL_KEY]: { open: true },
},
},
[STORES.SETTINGS]: {
settings: {
enterprise: {
sharing: true,
externalSecrets: false,
},
templates: {
host: '',
},
},
},
[STORES.PROJECTS]: {
personalProject: {
id: 'personal-project',
type: 'personal',
scopes: ['credential:create', 'credential:read'],
},
},
},
});
const credStore = useCredentialsStore(pinia);
credStore.state.credentialTypes = {
oAuth2Api: {
...oAuth2Api,
properties: [
...oAuth2Api.properties,
{
displayName: 'Allowed HTTP Request Domains',
name: 'allowedHttpRequestDomains',
type: 'options',
options: [
{ name: 'All', value: 'all' },
{ name: 'Specific Domains', value: 'domains' },
],
default: 'all',
},
{
displayName: 'Allowed Domains',
name: 'allowedDomains',
type: 'string',
default: '',
},
],
},
openAiOAuth2Api: {
name: 'openAiOAuth2Api',
extends: ['oAuth2Api'],
displayName: 'OpenAI Account (ChatGPT)',
__skipHttpRequestDomainRestrictions: true,
properties: [
{
displayName:
'Use this credential to connect your ChatGPT/OpenAI account with device-code login. n8n will save the OAuth token automatically.',
name: 'notice',
type: 'notice',
default: '',
},
],
},
};
const { queryByText } = renderComponent({
props: {
activeId: 'openAiOAuth2Api',
modalName: CREDENTIAL_EDIT_MODAL_KEY,
mode: 'new',
},
pinia,
});
await retry(() =>
expect(
queryByText('Use this credential to connect your ChatGPT/OpenAI account', { exact: false }),
).toBeInTheDocument(),
);
expect(queryByText('Allowed HTTP Request Domains')).not.toBeInTheDocument();
expect(queryByText('Allowed Domains')).not.toBeInTheDocument();
});
test('hides the save button when credentialId exists and there are no unsaved changes', async () => {
const { queryByTestId } = renderComponent({
props: {

View File

@ -608,6 +608,13 @@ function getCredentialProperties(name: string): INodeProperties[] {
// The properties defined on the parent credentials take precedence
NodeHelpers.mergeNodeProperties(combineProperties, credentialTypeData.properties);
if (credentialTypeData.__skipHttpRequestDomainRestrictions === true) {
return combineProperties.filter(
(property) =>
property.name !== 'allowedHttpRequestDomains' && property.name !== 'allowedDomains',
);
}
return combineProperties;
}

View File

@ -87,6 +87,35 @@ describe('CredentialIcon', () => {
expect(getByRole('img')).toHaveAttribute('src', useRootStore().baseUrl + testIconUrl);
});
it('uses the requested theme when credential icon references a node icon', () => {
const lightIconUrl = 'icons/n8n-nodes-base/dist/nodes/Test/test.svg';
const darkIconUrl = 'icons/n8n-nodes-base/dist/nodes/Test/test.dark.svg';
useCredentialsStore().setCredentialTypes([
mock<ICredentialType>({
name: 'test',
icon: 'node:n8n-nodes-base.test',
}),
]);
useNodeTypesStore().setNodeTypes([
mock<INodeTypeDescription>({
version: 1,
name: 'n8n-nodes-base.test',
iconUrl: { light: lightIconUrl, dark: darkIconUrl },
}),
]);
const { getByRole } = renderComponent({
pinia,
props: {
credentialTypeName: 'test',
theme: 'light',
},
});
expect(getByRole('img')).toHaveAttribute('src', useRootStore().baseUrl + lightIconUrl);
});
it('shows named icon when referenced node uses a named icon (e.g. HTTP Request)', () => {
useCredentialsStore().setCredentialTypes([
mock<ICredentialType>({

View File

@ -32,6 +32,12 @@ const referencedNodeIconSource = computed(() => {
if (!icon?.startsWith('node:')) return undefined;
const nodeType = nodeTypesStore.getNodeType(icon.replace('node:', ''));
if (!nodeType) return undefined;
const themeIconUrl = getThemedValue(nodeType.iconUrl, theme.value);
if (themeIconUrl) {
return { type: 'file' as const, src: rootStore.baseUrl + themeIconUrl };
}
return getNodeIconSource(nodeType, null, null);
});

View File

@ -3,9 +3,33 @@ import type {
ICredentialTestRequest,
ICredentialType,
IHttpRequestOptions,
IDataObject,
INodeProperties,
} from 'n8n-workflow';
/**
 * Narrows an unknown value to a plain data object: non-null, object-typed,
 * and not an array. Used to safely inspect `oauthTokenData`.
 */
function isDataObject(value: unknown): value is IDataObject {
	if (value === null || typeof value !== 'object') {
		return false;
	}
	return !Array.isArray(value);
}
/**
 * Resolves the secret used to authenticate against the OpenAI API.
 *
 * An explicitly configured `apiKey` always takes precedence; otherwise the
 * OAuth `access_token` stored under `oauthTokenData` is used. Returns an
 * empty string when neither is available.
 */
export function getOpenAiApiKey(credentials: ICredentialDataDecryptedObject): string {
	const configuredKey = typeof credentials.apiKey === 'string' ? credentials.apiKey : '';
	if (configuredKey !== '') {
		return configuredKey;
	}
	const tokenData = credentials.oauthTokenData;
	if (isDataObject(tokenData) && typeof tokenData.access_token === 'string') {
		return tokenData.access_token;
	}
	return '';
}
export const OPEN_AI_API_CREDENTIAL_TYPE = 'openAiApi';
export const OPEN_AI_OAUTH2_CREDENTIAL_TYPE = 'openAiOAuth2Api';

/**
 * Maps the node's `authentication` parameter to the credential type to load:
 * 'oAuth2' selects the OAuth2 credential, anything else (including undefined)
 * falls back to the API-key credential.
 */
export function getOpenAiCredentialType(authentication: unknown): string {
	if (authentication === 'oAuth2') {
		return OPEN_AI_OAUTH2_CREDENTIAL_TYPE;
	}
	return OPEN_AI_API_CREDENTIAL_TYPE;
}
export class OpenAiApi implements ICredentialType {
name = 'openAiApi';
@ -84,7 +108,7 @@ export class OpenAiApi implements ICredentialType {
): Promise<IHttpRequestOptions> {
requestOptions.headers ??= {};
requestOptions.headers['Authorization'] = `Bearer ${credentials.apiKey}`;
requestOptions.headers['Authorization'] = `Bearer ${getOpenAiApiKey(credentials)}`;
requestOptions.headers['OpenAI-Organization'] = credentials.organizationId;
if (

View File

@ -0,0 +1,83 @@
import type { ICredentialType, INodeProperties, Icon } from 'n8n-workflow';
// Public OpenAI "Codex" OAuth client used by the device-code login flow
// (the same id is hard-coded in the server-side device controller).
const OPENAI_CODEX_CLIENT_ID = 'app_EMoamEEZ73f0CkXaXp7hrann';
const OPENAI_CODEX_SCOPES = 'openid profile email offline_access';

/**
 * OAuth2 credential for connecting a ChatGPT/OpenAI account.
 *
 * All OAuth fields are hidden and pre-filled: tokens are acquired via the
 * device-code flow served by OpenAiOAuth2DeviceController rather than the
 * redirect flow, so the credential UI shows only the explanatory notice.
 * `__hideOAuthRedirectUrl` suppresses the redirect-URL copy box and
 * `__skipHttpRequestDomainRestrictions` keeps the generic HTTP Request
 * domain-restriction fields from being inherited via `extends`.
 */
export class OpenAiOAuth2Api implements ICredentialType {
	name = 'openAiOAuth2Api';
	extends = ['oAuth2Api'];
	// eslint-disable-next-line n8n-nodes-base/cred-class-field-display-name-missing-oauth2
	displayName = 'OpenAI Account (ChatGPT)';
	// Reuses the OpenAI node's icon.
	icon: Icon = 'node:n8n-nodes-base.openAi';
	documentationUrl = 'openai';
	__hideOAuthRedirectUrl = true;
	__skipHttpRequestDomainRestrictions = true;
	properties: INodeProperties[] = [
		{
			displayName:
				'Use this credential to connect your ChatGPT/OpenAI account with device-code login. n8n will save the OAuth token automatically.',
			name: 'notice',
			type: 'notice',
			default: '',
		},
		{
			displayName: 'Grant Type',
			name: 'grantType',
			type: 'hidden',
			default: 'pkce',
		},
		{
			displayName: 'Authorization URL',
			name: 'authUrl',
			type: 'hidden',
			default: 'https://auth.openai.com/oauth/authorize',
			required: true,
		},
		{
			displayName: 'Access Token URL',
			name: 'accessTokenUrl',
			type: 'hidden',
			default: 'https://auth.openai.com/oauth/token',
			required: true,
		},
		{
			displayName: 'Client ID',
			name: 'clientId',
			type: 'hidden',
			default: OPENAI_CODEX_CLIENT_ID,
			required: true,
		},
		{
			// Public (PKCE) client: no secret is used.
			displayName: 'Client Secret',
			name: 'clientSecret',
			type: 'hidden',
			typeOptions: { password: true },
			default: '',
		},
		{
			displayName: 'Scope',
			name: 'scope',
			type: 'hidden',
			default: OPENAI_CODEX_SCOPES,
		},
		{
			displayName: 'Auth URI Query Parameters',
			name: 'authQueryParameters',
			type: 'hidden',
			default: '',
		},
		{
			displayName: 'Authentication',
			name: 'authentication',
			type: 'hidden',
			default: 'body',
		},
	];
}

View File

@ -1,6 +1,7 @@
import type { ICredentialDataDecryptedObject, IHttpRequestOptions } from 'n8n-workflow';
import { OpenAiApi } from '../OpenAiApi.credentials';
import { getOpenAiApiKey, getOpenAiCredentialType, OpenAiApi } from '../OpenAiApi.credentials';
import { OpenAiOAuth2Api } from '../OpenAiOAuth2Api.credentials';
describe('OpenAiApi Credential', () => {
const openAiApi = new OpenAiApi();
@ -10,14 +11,62 @@ describe('OpenAiApi Credential', () => {
expect(openAiApi.displayName).toBe('OpenAI');
expect(openAiApi.documentationUrl).toBe('openai');
expect(openAiApi.properties).toHaveLength(6);
expect(openAiApi.properties[0]).toEqual(
expect.objectContaining({
name: 'apiKey',
type: 'string',
}),
);
expect(openAiApi.test.request.baseURL).toBe('={{$credentials?.url}}');
expect(openAiApi.test.request.url).toBe('/models');
});
// Tests for getOpenAiApiKey(): resolves the effective bearer token from
// decrypted credential data — a plain API key takes precedence over an
// OAuth access token.
describe('getOpenAiApiKey', () => {
it('should prefer the API key when present', () => {
expect(
getOpenAiApiKey({
apiKey: 'test-openai-api-key',
}),
).toBe('test-openai-api-key');
});
it('should prefer the API key over OAuth token data when both are present', () => {
expect(
getOpenAiApiKey({
apiKey: 'test-openai-api-key',
oauthTokenData: {
access_token: 'oauth-token-data',
},
}),
).toBe('test-openai-api-key');
});
it('should return token data from OpenAI OAuth credentials', () => {
// OAuth-only credentials: falls back to the stored access_token.
expect(
getOpenAiApiKey({
oauthTokenData: {
access_token: 'oauth-token-data',
},
}),
).toBe('oauth-token-data');
});
});
// Tests for getOpenAiCredentialType(): maps the node's `authentication`
// option to the name of the credential type to load.
describe('getOpenAiCredentialType', () => {
it('should return the API key credential type by default', () => {
// Undefined (legacy workflows without the option) and the explicit
// 'apiKey' value both resolve to the classic API-key credential.
expect(getOpenAiCredentialType(undefined)).toBe('openAiApi');
expect(getOpenAiCredentialType('apiKey')).toBe('openAiApi');
});
it('should return the OAuth2 credential type for OAuth2 authentication', () => {
expect(getOpenAiCredentialType('oAuth2')).toBe('openAiOAuth2Api');
});
});
describe('authenticate', () => {
it('should add Authorization header with API key only', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
};
const requestOptions: IHttpRequestOptions = {
@ -29,14 +78,14 @@ describe('OpenAiApi Credential', () => {
const result = await openAiApi.authenticate(credentials, requestOptions);
expect(result.headers).toEqual({
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'OpenAI-Organization': undefined,
});
});
it('should add Authorization and Organization headers', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
organizationId: 'org-123',
};
@ -49,14 +98,14 @@ describe('OpenAiApi Credential', () => {
const result = await openAiApi.authenticate(credentials, requestOptions);
expect(result.headers).toEqual({
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'OpenAI-Organization': 'org-123',
});
});
it('should add custom header when header toggle is enabled', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
organizationId: 'org-123',
header: true,
headerName: 'X-Custom-Header',
@ -72,7 +121,7 @@ describe('OpenAiApi Credential', () => {
const result = await openAiApi.authenticate(credentials, requestOptions);
expect(result.headers).toEqual({
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'OpenAI-Organization': 'org-123',
'X-Custom-Header': 'custom-value-123',
});
@ -80,7 +129,7 @@ describe('OpenAiApi Credential', () => {
it('should not add custom header when header toggle is disabled', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
header: false,
headerName: 'X-Custom-Header',
headerValue: 'custom-value-123',
@ -95,7 +144,7 @@ describe('OpenAiApi Credential', () => {
const result = await openAiApi.authenticate(credentials, requestOptions);
expect(result.headers).toEqual({
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'OpenAI-Organization': undefined,
});
expect(result.headers?.['X-Custom-Header']).toBeUndefined();
@ -103,7 +152,7 @@ describe('OpenAiApi Credential', () => {
it('should preserve existing headers', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
header: true,
headerName: 'X-Custom-Header',
headerValue: 'custom-value-123',
@ -125,7 +174,7 @@ describe('OpenAiApi Credential', () => {
expect(headers).toEqual(
expect.objectContaining({
authorization: 'Bearer sk-test123456789',
authorization: 'Bearer test-openai-api-key',
'x-custom-header': 'custom-value-123',
'openai-organization': undefined,
}),
@ -134,7 +183,7 @@ describe('OpenAiApi Credential', () => {
it('should handle empty organization ID', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
organizationId: '',
};
@ -147,14 +196,14 @@ describe('OpenAiApi Credential', () => {
const result = await openAiApi.authenticate(credentials, requestOptions);
expect(result.headers).toEqual({
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'OpenAI-Organization': '',
});
});
it('should preserve existing headers when adding auth headers', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
};
const requestOptions: IHttpRequestOptions = {
@ -169,13 +218,13 @@ describe('OpenAiApi Credential', () => {
expect(result.headers).toEqual({
'OpenAI-Beta': 'assistants=v2',
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
});
});
it('should preserve existing headers even with custom header option enabled', async () => {
const credentials: ICredentialDataDecryptedObject = {
apiKey: 'sk-test123456789',
apiKey: 'test-openai-api-key',
header: true,
headerName: 'X-Additional-Header',
headerValue: 'additional-value',
@ -195,9 +244,57 @@ describe('OpenAiApi Credential', () => {
expect(result.headers).toEqual({
'OpenAI-Beta': 'assistants=v2',
'X-Existing-Header': 'existing-value',
Authorization: 'Bearer sk-test123456789',
Authorization: 'Bearer test-openai-api-key',
'X-Additional-Header': 'additional-value',
});
});
});
});
// Tests for the OpenAiOAuth2Api credential class: verifies the fixed,
// hidden OAuth defaults for the device-code + PKCE login flow.
describe('OpenAiOAuth2Api Credential', () => {
const openAiOAuth2Api = new OpenAiOAuth2Api();
it('should configure OpenAI device-code OAuth2 with PKCE metadata', () => {
expect(openAiOAuth2Api.name).toBe('openAiOAuth2Api');
expect(openAiOAuth2Api.extends).toEqual(['oAuth2Api']);
expect(openAiOAuth2Api.displayName).toBe('OpenAI Account (ChatGPT)');
expect(openAiOAuth2Api.icon).toBe('node:n8n-nodes-base.openAi');
// Internal flags must both be set (redirect-URL hidden, domain
// restrictions skipped).
expect(openAiOAuth2Api.__hideOAuthRedirectUrl).toBe(true);
expect(openAiOAuth2Api.__skipHttpRequestDomainRestrictions).toBe(true);
// Expected hidden defaults for the PKCE flow against auth.openai.com.
expect(openAiOAuth2Api.properties).toEqual(
expect.arrayContaining([
expect.objectContaining({
name: 'notice',
type: 'notice',
displayName:
'Use this credential to connect your ChatGPT/OpenAI account with device-code login. n8n will save the OAuth token automatically.',
}),
expect.objectContaining({ name: 'grantType', type: 'hidden', default: 'pkce' }),
expect.objectContaining({
name: 'authUrl',
type: 'hidden',
default: 'https://auth.openai.com/oauth/authorize',
}),
expect.objectContaining({
name: 'accessTokenUrl',
type: 'hidden',
default: 'https://auth.openai.com/oauth/token',
}),
expect.objectContaining({
name: 'scope',
type: 'hidden',
default: 'openid profile email offline_access',
}),
expect.objectContaining({ name: 'authentication', type: 'hidden', default: 'body' }),
]),
);
// Fields belonging to the API-key credential (base URL, custom header,
// organization) must NOT leak into the OAuth2 credential.
expect(openAiOAuth2Api.properties).toEqual(
expect.not.arrayContaining([
expect.objectContaining({ name: 'url' }),
expect.objectContaining({ name: 'header' }),
expect.objectContaining({ name: 'organizationId' }),
]),
);
});
});

View File

@ -279,6 +279,7 @@
"dist/credentials/OneSimpleApi.credentials.js",
"dist/credentials/OnfleetApi.credentials.js",
"dist/credentials/OpenAiApi.credentials.js",
"dist/credentials/OpenAiOAuth2Api.credentials.js",
"dist/credentials/OpenCTIApi.credentials.js",
"dist/credentials/OpenWeatherMapApi.credentials.js",
"dist/credentials/OracleDBApi.credentials.js",

View File

@ -370,6 +370,8 @@ export interface ICredentialType {
documentationUrl?: string;
__overwrittenProperties?: string[];
__skipManagedCreation?: boolean;
__hideOAuthRedirectUrl?: boolean;
__skipHttpRequestDomainRestrictions?: boolean;
authenticate?: IAuthenticate;
preAuthentication?: (
this: IHttpRequestHelper,