From cb85785cb1a6d0065accc28aaaf05cb662c9e331 Mon Sep 17 00:00:00 2001 From: Jake Turner Date: Wed, 28 Jan 2026 23:53:42 +0000 Subject: [PATCH] feat(Ollama): fallback list of recommended models if API down --- admin/app/services/openwebui_service.ts | 7 ++- admin/constants/ollama.ts | 57 +++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 admin/constants/ollama.ts diff --git a/admin/app/services/openwebui_service.ts b/admin/app/services/openwebui_service.ts index 15d3d24..3ae0e37 100644 --- a/admin/app/services/openwebui_service.ts +++ b/admin/app/services/openwebui_service.ts @@ -7,6 +7,7 @@ import fs from 'node:fs/promises' import path from 'node:path' import { PassThrough } from 'node:stream' import { DownloadModelJob } from '#jobs/download_model_job' +import { FALLBACK_RECOMMENDED_OLLAMA_MODELS } from '../../constants/ollama.js' const NOMAD_MODELS_API_BASE_URL = 'https://api.projectnomad.us/api/v1/ollama/models' const MODELS_CACHE_FILE = path.join(process.cwd(), 'storage', 'ollama-models-cache.json') @@ -352,7 +353,11 @@ export class OpenWebUIService { try { const models = await this.retrieveAndRefreshModels(sort) if (!models) { - return null + // If we fail to get models from the API, return the fallback recommended models + logger.warn( + '[OpenWebUIService] Returning fallback recommended models due to failure in fetching available models' + ) + return FALLBACK_RECOMMENDED_OLLAMA_MODELS } if (!recommendedOnly) { diff --git a/admin/constants/ollama.ts b/admin/constants/ollama.ts new file mode 100644 index 0000000..13b43cb --- /dev/null +++ b/admin/constants/ollama.ts @@ -0,0 +1,57 @@ +import type { NomadOllamaModel } from '../types/ollama.js' + +/** + * Fallback basic recommended Ollama models in case fetching from the service fails.
+ */ +export const FALLBACK_RECOMMENDED_OLLAMA_MODELS: NomadOllamaModel[] = [ + { + name: 'llama3.1', + description: + 'Llama 3.1 is a new state-of-the-art model from Meta available in 8B, 70B and 405B parameter sizes.', + estimated_pulls: '109.3M', + id: '9fe9c575-e77e-4a51-a743-07359458ee71', + first_seen: '2026-01-28T23:37:31.000+00:00', + model_last_updated: '1 year ago', + tags: [ + { + name: 'llama3.1:8b-text-q4_1', + size: '5.1 GB', + context: '128k', + input: 'Text', + }, + ], + }, + { + name: 'deepseek-r1', + description: + 'DeepSeek-R1 is a family of open reasoning models with performance approaching that of leading models, such as O3 and Gemini 2.5 Pro.', + estimated_pulls: '77.2M', + id: '0b566560-68a6-4964-b0d4-beb3ab1ad694', + first_seen: '2026-01-28T23:37:31.000+00:00', + model_last_updated: '7 months ago', + tags: [ + { + name: 'deepseek-r1:1.5b', + size: '1.1 GB', + context: '128k', + input: 'Text', + }, + ], + }, + { + name: 'llama3.2', + description: "Meta's Llama 3.2 goes small with 1B and 3B models.", + estimated_pulls: '54.7M', + id: 'c9a1bc23-b290-4501-a913-f7c9bb39c3ad', + first_seen: '2026-01-28T23:37:31.000+00:00', + model_last_updated: '1 year ago', + tags: [ + { + name: 'llama3.2:1b-text-q2_K', + size: '581 MB', + context: '128k', + input: 'Text', + }, + ], + }, +]