From dd7d5b023041dd4cd58021d729e561834d1cd393 Mon Sep 17 00:00:00 2001 From: Henry Estela Date: Tue, 24 Mar 2026 11:24:50 -0700 Subject: [PATCH] fix(ai-chat): easy setup with remote ollama wasn't finishing install This also makes it so that nomad_ollama container is not required when doing a remote llm setup. --- admin/app/services/system_service.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/admin/app/services/system_service.ts b/admin/app/services/system_service.ts index 1e68f2c..9f3bfdf 100644 --- a/admin/app/services/system_service.ts +++ b/admin/app/services/system_service.ts @@ -659,6 +659,11 @@ export class SystemService { if (service.installed) { // If marked as installed but container doesn't exist, mark as not installed if (!containerExists) { + // Exception: remote Ollama is configured without a local container — don't reset it + if (service.service_name === SERVICE_NAMES.OLLAMA) { + const remoteUrl = await KVStore.getValue('ai.remoteOllamaUrl') + if (remoteUrl) continue + } logger.warn( `Service ${service.service_name} is marked as installed but container does not exist. Marking as not installed.` )