fix(ai-chat): easy setup with remote Ollama wasn't finishing install

This also makes it so that the nomad_ollama container is not required when
doing a remote LLM setup.
This commit is contained in:
Henry Estela 2026-03-24 11:24:50 -07:00
parent cac6917b1b
commit dd7d5b0230
No known key found for this signature in database
GPG Key ID: 90439853E9E235BA

View File

@ -659,6 +659,11 @@ export class SystemService {
if (service.installed) {
// If marked as installed but container doesn't exist, mark as not installed
if (!containerExists) {
// Exception: remote Ollama is configured without a local container — don't reset it
if (service.service_name === SERVICE_NAMES.OLLAMA) {
const remoteUrl = await KVStore.getValue('ai.remoteOllamaUrl')
if (remoteUrl) continue
}
logger.warn(
`Service ${service.service_name} is marked as installed but container does not exist. Marking as not installed.`
)