From 6646b3480b968d7f13522a1d86a23d7fecf217a9 Mon Sep 17 00:00:00 2001 From: chriscrosstalk <49691103+chriscrosstalk@users.noreply.github.com> Date: Mon, 20 Apr 2026 16:02:49 -0700 Subject: [PATCH] fix(AI): stop local nomad_ollama container when remote Ollama is configured (#744) When users set a remote Ollama URL via AI Settings, the local nomad_ollama container continued running and competed with the remote host for port 11434 and GPU access. Now configureRemote stops the local container on set and restores it on clear (if still present). Container and its models volume are preserved so the local install can be re-enabled later. Closes #662 Co-authored-by: Claude Opus 4.7 (1M context) --- admin/app/controllers/ollama_controller.ts | 62 ++++++++++++++++++++-- 1 file changed, 59 insertions(+), 3 deletions(-) diff --git a/admin/app/controllers/ollama_controller.ts b/admin/app/controllers/ollama_controller.ts index edd6f36..68b6b78 100644 --- a/admin/app/controllers/ollama_controller.ts +++ b/admin/app/controllers/ollama_controller.ts @@ -212,13 +212,21 @@ export default class OllamaController { return response.status(404).send({ success: false, message: 'Ollama service record not found.' }) } - // Clear path: null or empty URL removes remote config and marks service as not installed + // Clear path: null or empty URL removes remote config. If a local nomad_ollama container + // still exists (user had previously installed AI Assistant locally), restart it and keep + // the service marked installed. Otherwise fall back to uninstalled. if (!remoteUrl || remoteUrl.trim() === '') { await KVStore.clearValue('ai.remoteOllamaUrl') - ollamaService.installed = false + const hasLocalContainer = await this._startLocalOllamaContainerIfExists() + ollamaService.installed = hasLocalContainer ollamaService.installation_status = 'idle' await ollamaService.save() - return { success: true, message: 'Remote Ollama configuration cleared.' 
} + return { + success: true, + message: hasLocalContainer + ? 'Remote Ollama cleared. Local Ollama container restored.' + : 'Remote Ollama configuration cleared.', + } } // Validate URL format @@ -253,6 +261,10 @@ export default class OllamaController { ollamaService.installation_status = 'idle' await ollamaService.save() + // Stop the local nomad_ollama container (if running) so it doesn't compete with the + // remote host for GPU / port 11434. Preserves the container and its models volume. + await this._stopLocalOllamaContainer() + // Install Qdrant if not already installed (fire-and-forget) const qdrantService = await Service.query().where('service_name', SERVICE_NAMES.QDRANT).first() if (qdrantService && !qdrantService.installed) { @@ -270,6 +282,50 @@ export default class OllamaController { return { success: true, message: 'Remote Ollama configured.' } } + private async _stopLocalOllamaContainer(): Promise<void> { + try { + const containers = await this.dockerService.docker.listContainers({ all: true }) + const ollamaContainer = containers.find((c) => + c.Names.includes(`/${SERVICE_NAMES.OLLAMA}`) + ) + if (!ollamaContainer || ollamaContainer.State !== 'running') { + return + } + await this.dockerService.docker.getContainer(ollamaContainer.Id).stop() + this.dockerService.invalidateServicesStatusCache() + logger.info('[OllamaController] Stopped local nomad_ollama (remote Ollama configured)') + } catch (error: any) { + logger.error( + { err: error }, + '[OllamaController] Failed to stop local nomad_ollama; remote Ollama is still active' + ) + } + } + + private async _startLocalOllamaContainerIfExists(): Promise<boolean> { + try { + const containers = await this.dockerService.docker.listContainers({ all: true }) + const ollamaContainer = containers.find((c) => + c.Names.includes(`/${SERVICE_NAMES.OLLAMA}`) + ) + if (!ollamaContainer) { + return false + } + if (ollamaContainer.State !== 'running') { + await this.dockerService.docker.getContainer(ollamaContainer.Id).start() + 
this.dockerService.invalidateServicesStatusCache() + logger.info('[OllamaController] Started local nomad_ollama (remote Ollama cleared)') + } + return true + } catch (error: any) { + logger.error( + { err: error }, + '[OllamaController] Failed to start local nomad_ollama on remote clear' + ) + return false + } + } + async deleteModel({ request }: HttpContext) { const reqData = await request.validateUsing(modelNameSchema) await this.ollamaService.deleteModel(reqData.model)