fix: prevent embedding retry storm when Ollama is not installed

When Ollama isn't installed, every ZIM download dispatches embedding jobs
that fail and retry 30x with 60s backoff. With many ZIM files downloading
in parallel, this exhausts Redis connections with EPIPE/ECONNRESET errors.

Two changes:
1. Don't dispatch embedding jobs when Ollama isn't installed (belt)
2. Use BullMQ UnrecoverableError for "not installed" so jobs fail
   immediately without retrying (suspenders)

Closes #351

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Chris Sherwood 2026-03-15 17:11:30 -07:00 committed by Jake Turner
parent 5e0fba29ca
commit 34076b107b
2 changed files with 23 additions and 12 deletions

View File

@@ -1,4 +1,4 @@
import { Job } from 'bullmq' import { Job, UnrecoverableError } from 'bullmq'
import { QueueService } from '#services/queue_service' import { QueueService } from '#services/queue_service'
import { EmbedJobWithProgress } from '../../types/rag.js' import { EmbedJobWithProgress } from '../../types/rag.js'
import { RagService } from '#services/rag_service' import { RagService } from '#services/rag_service'
@@ -42,7 +42,15 @@ export class EmbedFileJob {
const ragService = new RagService(dockerService, ollamaService) const ragService = new RagService(dockerService, ollamaService)
try { try {
// Check if Ollama and Qdrant services are ready // Check if Ollama and Qdrant services are installed and ready
// Use UnrecoverableError for "not installed" so BullMQ won't retry —
// retrying 30x when the service doesn't exist just wastes Redis connections
const ollamaUrl = await dockerService.getServiceURL('nomad_ollama')
if (!ollamaUrl) {
logger.warn('[EmbedFileJob] Ollama is not installed. Skipping embedding for: %s', fileName)
throw new UnrecoverableError('Ollama service is not installed. Install AI Assistant to enable file embeddings.')
}
const existingModels = await ollamaService.getModels() const existingModels = await ollamaService.getModels()
if (!existingModels) { if (!existingModels) {
logger.warn('[EmbedFileJob] Ollama service not ready yet. Will retry...') logger.warn('[EmbedFileJob] Ollama service not ready yet. Will retry...')
@@ -51,8 +59,8 @@ export class EmbedFileJob {
const qdrantUrl = await dockerService.getServiceURL('nomad_qdrant') const qdrantUrl = await dockerService.getServiceURL('nomad_qdrant')
if (!qdrantUrl) { if (!qdrantUrl) {
logger.warn('[EmbedFileJob] Qdrant service not ready yet. Will retry...') logger.warn('[EmbedFileJob] Qdrant is not installed. Skipping embedding for: %s', fileName)
throw new Error('Qdrant service not ready yet') throw new UnrecoverableError('Qdrant service is not installed. Install AI Assistant to enable file embeddings.')
} }
logger.info(`[EmbedFileJob] Services ready. Processing file: ${fileName}`) logger.info(`[EmbedFileJob] Services ready. Processing file: ${fileName}`)

View File

@@ -82,14 +82,17 @@ export class RunDownloadJob {
const zimService = new ZimService(dockerService) const zimService = new ZimService(dockerService)
await zimService.downloadRemoteSuccessCallback([url], true) await zimService.downloadRemoteSuccessCallback([url], true)
// Dispatch an embedding job for the downloaded ZIM file // Only dispatch embedding job if AI Assistant (Ollama) is installed
try { const ollamaUrl = await dockerService.getServiceURL('nomad_ollama')
await EmbedFileJob.dispatch({ if (ollamaUrl) {
fileName: url.split('/').pop() || '', try {
filePath: filepath, await EmbedFileJob.dispatch({
}) fileName: url.split('/').pop() || '',
} catch (error) { filePath: filepath,
console.error(`[RunDownloadJob] Error dispatching EmbedFileJob for URL ${url}:`, error) })
} catch (error) {
console.error(`[RunDownloadJob] Error dispatching EmbedFileJob for URL ${url}:`, error)
}
} }
} else if (filetype === 'map') { } else if (filetype === 'map') {
const mapsService = new MapService() const mapsService = new MapService()