feat(Open WebUI): manage models via Command Center

This commit is contained in:
Jake Turner 2026-01-20 06:14:57 +00:00 committed by Jake Turner
parent b3ef977484
commit 937da5d869
17 changed files with 1463 additions and 316 deletions

View File

@ -1,37 +1,7 @@
FROM node:22-slim AS base
FROM node:22.16.0-alpine3.22 AS base
# Install bash & curl for entrypoint script compatibility
# as well as dependencies for Playwright Chromium
RUN apt-get update && apt-get install -y \
bash \
curl \
wget \
ca-certificates \
fonts-liberation \
libnss3 \
libatk-bridge2.0-0 \
libdrm2 \
libxkbcommon0 \
libgbm1 \
libasound2 \
libxcb-shm0 \
libx11-xcb1 \
libxrandr2 \
libxcomposite1 \
libxcursor1 \
libxdamage1 \
libxfixes3 \
libxi6 \
libgtk-3-0t64 \
libpangocairo-1.0-0 \
libpango-1.0-0 \
libatk1.0-0t64 \
libcairo-gobject2 \
libcairo2 \
libgdk-pixbuf-2.0-0 \
libxrender1 \
libasound2t64
&& rm -rf /var/lib/apt/lists/*
RUN apk add --no-cache bash curl
# All deps stage
FROM base AS deps

View File

@ -0,0 +1,40 @@
import { OpenWebUIService } from '#services/openwebui_service'
import { modelNameSchema } from '#validators/download'
import { getAvailableModelsSchema } from '#validators/openwebui'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
@inject()
export default class OpenWebUIController {
  constructor(private openWebUIService: OpenWebUIService) {}

  /** List models available for install, optionally sorted and/or limited to recommendations. */
  async models({ request }: HttpContext) {
    const { sort, recommendedOnly } = await request.validateUsing(getAvailableModelsSchema)
    return await this.openWebUIService.getAvailableModels({ sort, recommendedOnly })
  }

  /** List models currently installed in the local Ollama instance. */
  async installedModels({}: HttpContext) {
    return await this.openWebUIService.getInstalledModels()
  }

  /** Remove a single model by name. */
  async deleteModel({ request }: HttpContext) {
    const { model } = await request.validateUsing(modelNameSchema)
    await this.openWebUIService.deleteModel(model)
    return {
      success: true,
      message: `Model deleted: ${model}`,
    }
  }

  /** Queue a background download job for a single model by name. */
  async dispatchModelDownload({ request }: HttpContext) {
    const { model } = await request.validateUsing(modelNameSchema)
    await this.openWebUIService.dispatchModelDownload(model)
    return {
      success: true,
      message: `Download job dispatched for model: ${model}`,
    }
  }
}

View File

@ -46,9 +46,11 @@ export default class SettingsController {
}
async models({ inertia }: HttpContext) {
const availableModels = await this.openWebUIService.getAvailableModels();
const installedModels = await this.openWebUIService.getInstalledModels();
return inertia.render('settings/models', {
models: {
availableModels: availableModels || [],
installedModels: installedModels || []
}
});

View File

@ -0,0 +1,134 @@
import { Job } from 'bullmq'
import { QueueService } from '#services/queue_service'
import { OpenWebUIService } from '#services/openwebui_service'
import { createHash } from 'crypto'
import logger from '@adonisjs/core/services/logger'
import { DockerService } from '#services/docker_service'
/** Payload carried by a queued model-download job. */
export interface DownloadModelJobParams {
// Ollama model identifier, e.g. "llama2:latest" — presumably name[:tag]; TODO confirm format
modelName: string
}
export class DownloadModelJob {
  /** BullMQ queue name this job type runs on. */
  static get queue() {
    return 'model-downloads'
  }

  /** Job name (key) within the queue. */
  static get key() {
    return 'download-model'
  }

  /**
   * Deterministic job id derived from the model name, so dispatching the same
   * model twice resolves to the same BullMQ job id (enabling dedupe/lookup).
   */
  static getJobId(modelName: string): string {
    return createHash('sha256').update(modelName).digest('hex').slice(0, 16)
  }

  /**
   * Worker entry point: probes that the Ollama/OpenWebUI stack is reachable,
   * then runs the download, mirroring progress into the BullMQ job.
   *
   * @throws when the service is not ready yet or the download fails to
   *   initiate — BullMQ's retry/backoff settings (see dispatch) handle retries.
   */
  async handle(job: Job) {
    const { modelName } = job.data as DownloadModelJobParams
    logger.info(`[DownloadModelJob] Attempting to download model: ${modelName}`)

    // Check if OpenWebUI/Ollama services are ready.
    const dockerService = new DockerService()
    const openWebUIService = new OpenWebUIService(dockerService)

    // getInstalledModels returns an array (possibly empty) once the service is
    // reachable and null while it is not — use that as a readiness probe.
    const existingModels = await openWebUIService.getInstalledModels()
    if (!existingModels) {
      logger.warn(
        `[DownloadModelJob] OpenWebUI service not ready yet for model ${modelName}. Will retry...`
      )
      throw new Error('OpenWebUI service not ready yet')
    }

    logger.info(
      `[DownloadModelJob] OpenWebUI service is ready. Initiating download for ${modelName}`
    )

    // Services are ready; initiate the download with progress tracking.
    const result = await openWebUIService._downloadModel(modelName, (progress) => {
      const progressData = {
        status: progress.status,
        percent: progress.percent,
        completed: progress.completed,
        total: progress.total,
      }

      if (progress.percent !== undefined) {
        // Update the job progress (0-100 scale for BullMQ). This callback is
        // synchronous, so the promise is deliberately fire-and-forget.
        void job.updateProgress(progress.percent)
        logger.info(
          `[DownloadModelJob] Model ${modelName}: ${progress.status} - ${progress.percent}% (${progress.completed}/${progress.total} bytes)`
        )
      } else {
        logger.info(`[DownloadModelJob] Model ${modelName}: ${progress.status}`)
      }

      // Store detailed progress in job data for clients to query (also
      // fire-and-forget for the same reason as above).
      void job.updateData({
        ...job.data,
        progress: progressData,
      })
    })

    if (!result.success) {
      logger.error(
        `[DownloadModelJob] Failed to initiate download for model ${modelName}: ${result.message}`
      )
      throw new Error(`Failed to initiate download for model: ${result.message}`)
    }

    logger.info(`[DownloadModelJob] Successfully completed download for model ${modelName}`)
    return {
      modelName,
      message: result.message,
    }
  }

  /** Look up the (possibly finished) download job for a model, if one exists. */
  static async getByModelName(modelName: string): Promise<Job | undefined> {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(modelName)
    return await queue.getJob(jobId)
  }

  /**
   * Enqueue a download job for the given model. Idempotent: when a job with
   * the same derived id already exists, the existing job is returned instead
   * of creating a duplicate.
   */
  static async dispatch(params: DownloadModelJobParams) {
    const queueService = new QueueService()
    const queue = queueService.getQueue(this.queue)
    const jobId = this.getJobId(params.modelName)
    try {
      const job = await queue.add(this.key, params, {
        jobId,
        attempts: 40, // Many attempts since services may take considerable time to install
        backoff: {
          type: 'fixed',
          delay: 60000, // Check every 60 seconds
        },
        removeOnComplete: false, // Keep for status checking
        removeOnFail: false, // Keep failed jobs for debugging
      })
      return {
        job,
        created: true,
        message: `Dispatched model download job for ${params.modelName}`,
      }
    } catch (error) {
      // `error` is untyped: guard before touching `.message` so a non-Error
      // throw is rethrown intact instead of being masked by a TypeError here.
      if (error instanceof Error && error.message.includes('job already exists')) {
        const existing = await queue.getJob(jobId)
        return {
          job: existing,
          created: false,
          message: `Job already exists for model ${params.modelName}`,
        }
      }
      throw error
    }
  }
}

View File

@ -9,7 +9,7 @@ import { ZIM_STORAGE_PATH } from '../utils/fs.js'
@inject()
export class DockerService {
private docker: Docker
public docker: Docker
private activeInstallations: Set<string> = new Set()
public static KIWIX_SERVICE_NAME = 'nomad_kiwix_serve'
public static OLLAMA_SERVICE_NAME = 'nomad_ollama'

View File

@ -1,123 +1,646 @@
import { inject } from '@adonisjs/core'
import { chromium } from 'playwright'
import { SystemService } from './system_service.js'
import logger from '@adonisjs/core/services/logger'
import { DockerService } from './docker_service.js'
import { ServiceSlim } from '../../types/services.js'
import axios from 'axios'
import { NomadOllamaModel, OllamaModelListing } from '../../types/ollama.js'
import fs from 'node:fs/promises'
import path from 'node:path'
import { PassThrough } from 'node:stream'
import { DownloadModelJob } from '#jobs/download_model_job'
const NOMAD_MODELS_API_BASE_URL = 'https://api.projectnomad.us/api/v1/ollama/models'
const MODELS_CACHE_FILE = path.join(process.cwd(), 'storage', 'ollama-models-cache.json')
const CACHE_MAX_AGE_MS = 24 * 60 * 60 * 1000 // 24 hours
@inject()
export class OpenWebUIService {
constructor(private systemService: SystemService) {}
async getOpenWebUIToken(): Promise<{
token: string
location: string
} | null> {
try {
const { openWebUIService } = await this.getOpenWebUIAndOllamaServices()
if (!openWebUIService) {
logger.warn('[OpenWebUIService] Open WebUI service is not installed.')
return null
}
const location = this.extractOpenWebUIUrl(openWebUIService)
if (!location) {
logger.warn('[OpenWebUIService] Could not determine Open WebUI URL.')
return null
}
const browser = await chromium.launch({ headless: true })
const context = await browser.newContext()
const page = await context.newPage()
constructor(private dockerService: DockerService) {}
/** We need to call this in the DownloadModelJob, so it can't be private,
* but shouldn't be called directly (dispatch job instead)
*/
async _downloadModel(
model: string,
onProgress?: (progress: {
status: string
completed?: number
total?: number
percent?: number
}) => void
): Promise<{ success: boolean; message: string }> {
return new Promise((resolve) => {
try {
await page.goto(location, { waitUntil: 'networkidle' })
const cookies = await context.cookies()
const tokenCookie = cookies.find((cookie) => cookie.name === 'token')
if (tokenCookie) {
return { token: tokenCookie.value, location }
const container = this.dockerService.docker.getContainer(DockerService.OLLAMA_SERVICE_NAME)
if (!container) {
logger.warn('[OpenWebUIService] Ollama container is not running. Cannot download model.')
resolve({
success: false,
message: 'Ollama is not running. Please start Ollama and try again.',
})
return
}
return null
} finally {
await browser.close()
container.exec(
{
Cmd: ['ollama', 'pull', model],
AttachStdout: true,
AttachStderr: true,
},
(err, exec) => {
if (err) {
logger.error(
`[OpenWebUIService] Failed to execute model download command: ${
err instanceof Error ? err.message : err
}`
)
resolve({ success: false, message: 'Failed to execute download command.' })
return
}
if (!exec) {
logger.error('[OpenWebUIService] No exec instance returned from exec command')
resolve({ success: false, message: 'Failed to create exec instance.' })
return
}
exec.start(
{
hijack: true,
stdin: false,
},
(startErr, stream) => {
if (startErr) {
logger.error(
`[OpenWebUIService] Failed to start exec stream: ${
startErr instanceof Error ? startErr.message : startErr
}`
)
resolve({ success: false, message: 'Failed to start download stream.' })
return
}
if (!stream) {
logger.error('[OpenWebUIService] No stream returned when starting exec')
resolve({ success: false, message: 'No stream available.' })
return
}
// Create PassThrough streams to capture output
const stdout = new PassThrough()
const stderr = new PassThrough()
// Demultiplex the Docker stream
this.dockerService.docker.modem.demuxStream(stream, stdout, stderr)
// Capture and parse stdout (if any)
stdout.on('data', (chunk) => {
const output = chunk.toString()
logger.info(`[OpenWebUIService] Model download (stdout): ${output}`)
})
// Capture stderr - ollama sends progress/status here (not necessarily errors)
stderr.on('data', (chunk) => {
const output = chunk.toString()
// Check if this is an actual error message
if (
output.toLowerCase().includes('error') ||
output.toLowerCase().includes('failed')
) {
logger.error(`[OpenWebUIService] Model download error: ${output}`)
} else {
// This is normal progress/status output from ollama
logger.info(`[OpenWebUIService] Model download progress: ${output}`)
// Parse JSON progress if available
try {
const lines = output
.split('\n')
.filter(
(line: any) => typeof line.trim() === 'string' && line.trim().length > 0
)
for (const line of lines) {
const parsed = JSON.parse(line)
if (parsed.status) {
const progressData: {
status: string
completed?: number
total?: number
percent?: number
} = {
status: parsed.status,
}
// Extract byte progress if available
if (parsed.completed !== undefined && parsed.total !== undefined) {
progressData.completed = parsed.completed
progressData.total = parsed.total
progressData.percent = Math.round(
(parsed.completed / parsed.total) * 100
)
}
// Call progress callback
if (onProgress) {
onProgress(progressData)
}
// Log structured progress
if (progressData.percent !== undefined) {
logger.info(
`[OpenWebUIService] ${progressData.status}: ${progressData.percent}% (${progressData.completed}/${progressData.total} bytes)`
)
} else {
logger.info(`[OpenWebUIService] ${progressData.status}`)
}
}
}
} catch {
// Not JSON, already logged above
}
}
})
// Handle stream end
stream.on('end', () => {
logger.info(
`[OpenWebUIService] Model download process ended for model "${model}"`
)
resolve({
success: true,
message: 'Model download completed successfully.',
})
})
// Handle stream errors
stream.on('error', (streamErr) => {
logger.error(
`[OpenWebUIService] Error during model download stream: ${
streamErr instanceof Error ? streamErr.message : streamErr
}`
)
resolve({
success: false,
message: 'Error occurred during model download.',
})
})
}
)
}
)
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to download model "${model}": ${
error instanceof Error ? error.message : error
}`
)
resolve({ success: false, message: 'Failed to download model.' })
}
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to get Open WebUI token: ${error instanceof Error ? error.message : error}`
)
return null
}
})
}
async getInstalledModels(): Promise<string[] | null> {
try {
const tokenData = await this.getOpenWebUIToken()
if (!tokenData) {
logger.warn('[OpenWebUIService] Cannot get installed models without Open WebUI token.')
return null
}
async deleteModel(model: string): Promise<{ success: boolean; message: string }> {
return new Promise((resolve) => {
try {
const container = this.dockerService.docker.getContainer(DockerService.OLLAMA_SERVICE_NAME)
if (!container) {
logger.warn('[OpenWebUIService] Ollama container is not running. Cannot remove model.')
resolve({
success: false,
message: 'Ollama is not running. Please start Ollama and try again.',
})
return
}
const response = await axios.get(tokenData.location + '/ollama/api/tags', {
headers: {
Authorization: `Bearer ${tokenData.token}`,
},
container.exec(
{
Cmd: ['ollama', 'rm', model],
AttachStdout: true,
AttachStderr: true,
},
(err, exec) => {
if (err) {
logger.error(
`[OpenWebUIService] Failed to execute model remove command: ${
err instanceof Error ? err.message : err
}`
)
resolve({ success: false, message: 'Failed to execute remove command.' })
return
}
if (!exec) {
logger.error('[OpenWebUIService] No exec instance returned from remove command')
resolve({ success: false, message: 'Failed to create exec instance.' })
return
}
exec.start(
{
hijack: true,
stdin: false,
},
(startErr, stream) => {
if (startErr) {
logger.error(
`[OpenWebUIService] Failed to start exec stream for remove: ${
startErr instanceof Error ? startErr.message : startErr
}`
)
resolve({ success: false, message: 'Failed to start remove command.' })
return
}
if (!stream) {
logger.error('[OpenWebUIService] No stream returned for remove command')
resolve({ success: false, message: 'No stream available.' })
return
}
const stdout = new PassThrough()
const stderr = new PassThrough()
let output = ''
let errorOutput = ''
this.dockerService.docker.modem.demuxStream(stream, stdout, stderr)
stdout.on('data', (chunk) => {
output += chunk.toString()
})
stderr.on('data', (chunk) => {
errorOutput += chunk.toString()
})
stream.on('end', () => {
if (errorOutput) {
logger.error(`[OpenWebUIService] Error removing model: ${errorOutput}`)
resolve({
success: false,
message: errorOutput.trim() || 'Failed to remove model.',
})
return
}
logger.info(`[OpenWebUIService] Successfully removed model "${model}"`)
if (output) {
logger.info(`[OpenWebUIService] Remove output: ${output}`)
}
resolve({
success: true,
message: 'Model removed successfully.',
})
})
stream.on('error', (streamErr) => {
logger.error(
`[OpenWebUIService] Stream error during model remove: ${
streamErr instanceof Error ? streamErr.message : streamErr
}`
)
resolve({
success: false,
message: 'Error occurred while removing model.',
})
})
}
)
}
)
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to remove model "${model}": ${
error instanceof Error ? error.message : error
}`
)
resolve({ success: false, message: 'Failed to remove model.' })
}
})
}
async dispatchModelDownload(modelName: string): Promise<{ success: boolean; message: string }> {
try {
logger.info(`[OpenWebUIService] Dispatching model download for ${modelName} via job queue`)
await DownloadModelJob.dispatch({
modelName,
})
if (response.status === 200 && response.data.models && Array.isArray(response.data.models)) {
console.log("GOT RESPONSE DATA:", response.data)
return response.data.models as string[]
return {
success: true,
message:
'Model download has been queued successfully. It will start shortly after Ollama and Open WebUI are ready (if not already).',
}
logger.warn(
`[OpenWebUIService] Unexpected response when fetching installed models: ${response.status}`
)
return null
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to get installed models: ${error instanceof Error ? error.message : error}`
`[OpenWebUIService] Failed to dispatch model download for ${modelName}: ${error instanceof Error ? error.message : error}`
)
return null
return {
success: false,
message: 'Failed to queue model download. Please try again.',
}
}
}
private async getOpenWebUIAndOllamaServices(): Promise<{
openWebUIService: ServiceSlim | null
ollamaService: ServiceSlim | null
}> {
async getAvailableModels(
{ sort, recommendedOnly }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean } = {
sort: 'pulls',
recommendedOnly: false,
}
): Promise<NomadOllamaModel[] | null> {
try {
const services = await this.systemService.getServices({ installedOnly: true })
const owuiContainer = services.find(
(service) => service.service_name === DockerService.OPEN_WEBUI_SERVICE_NAME
)
const ollamaContainer = services.find(
(service) => service.service_name === DockerService.OLLAMA_SERVICE_NAME
)
return {
openWebUIService: owuiContainer || null,
ollamaService: ollamaContainer || null,
const models = await this.retrieveAndRefreshModels(sort)
if (!models) {
return null
}
if (!recommendedOnly) {
return models
}
// If recommendedOnly is true, only return the first three models (if sorted by pulls, these will be the top 3)
const sortedByPulls = sort === 'pulls' ? models : this.sortModels(models, 'pulls')
const firstThree = sortedByPulls.slice(0, 3)
// Only return the first tag of each of these models (should be the most lightweight variant)
const recommendedModels = firstThree.map((model) => {
return {
...model,
tags: model.tags && model.tags.length > 0 ? [model.tags[0]] : [],
}
})
return recommendedModels
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to get Open WebUI and Ollama services: ${error instanceof Error ? error.message : error}`
`[OpenWebUIService] Failed to get available models: ${error instanceof Error ? error.message : error}`
)
return {
openWebUIService: null,
ollamaService: null,
}
return null
}
}
private extractOpenWebUIUrl(service: ServiceSlim): string | null {
const location = service.ui_location || '3000'
if (!location || isNaN(Number(location))) {
logger.warn(`[OpenWebUIService] Invalid Open WebUI location: ${location}`)
/**
 * List the models currently installed in the local Ollama container by
 * exec-ing `ollama list` inside it and parsing the tabular output.
 *
 * @returns parsed model listings; `[]` when the command produced no output
 *   (no models installed); `null` when the container is unavailable or the
 *   exec/stream fails at any stage.
 */
async getInstalledModels(): Promise<OllamaModelListing[] | null> {
  // dockerode's exec API is callback-based, so the whole flow is wrapped in
  // a single promise that is resolved exactly once per failure/success path.
  return new Promise((resolve) => {
    try {
      const container = this.dockerService.docker.getContainer(DockerService.OLLAMA_SERVICE_NAME)
      if (!container) {
        logger.warn('[OpenWebUIService] Ollama container is not running. Cannot list models.')
        resolve(null)
        return
      }
      container.exec(
        {
          Cmd: ['ollama', 'list'],
          AttachStdout: true,
          AttachStderr: true,
        },
        (err, exec) => {
          if (err) {
            logger.error(
              `[OpenWebUIService] Failed to execute ollama list command: ${
                err instanceof Error ? err.message : err
              }`
            )
            resolve(null)
            return
          }
          if (!exec) {
            logger.error('[OpenWebUIService] No exec instance returned from ollama list')
            resolve(null)
            return
          }
          exec.start(
            {
              hijack: true,
              stdin: false,
            },
            (startErr, stream) => {
              if (startErr) {
                logger.error(
                  `[OpenWebUIService] Failed to start exec stream for ollama list: ${
                    startErr instanceof Error ? startErr.message : startErr
                  }`
                )
                resolve(null)
                return
              }
              if (!stream) {
                logger.error('[OpenWebUIService] No stream returned for ollama list')
                resolve(null)
                return
              }
              // Accumulate the demultiplexed stdout/stderr of the container
              // command; Docker interleaves both on one stream.
              const stdout = new PassThrough()
              const stderr = new PassThrough()
              let output = ''
              let errorOutput = ''
              this.dockerService.docker.modem.demuxStream(stream, stdout, stderr)
              stdout.on('data', (chunk) => {
                output += chunk.toString()
              })
              stderr.on('data', (chunk) => {
                errorOutput += chunk.toString()
              })
              stream.on('end', () => {
                // stderr output is logged but not treated as fatal: parsing
                // proceeds as long as stdout produced something.
                if (errorOutput) {
                  logger.error(
                    `[OpenWebUIService] Error from ollama list command: ${errorOutput}`
                  )
                }
                if (!output) {
                  logger.info('[OpenWebUIService] No models installed')
                  resolve([])
                  return
                }
                try {
                  // Parse the tabular output from ollama list
                  // Expected format:
                  // NAME ID SIZE MODIFIED
                  // llama2:latest abc123def456 3.8 GB 2 days ago
                  const lines = output.split('\n').filter((line) => line.trim())
                  // Skip header line and parse model entries
                  const models: OllamaModelListing[] = []
                  for (let i = 1; i < lines.length; i++) {
                    const line = lines[i].trim()
                    if (!line) continue
                    // Split by whitespace (2+ spaces to handle columns with spaces)
                    const parts = line.split(/\s{2,}/)
                    // Rows with fewer than 4 columns are silently skipped.
                    if (parts.length >= 4) {
                      models.push({
                        name: parts[0].trim(),
                        id: parts[1].trim(),
                        size: parts[2].trim(),
                        modified: parts[3].trim(),
                      })
                    }
                  }
                  logger.info(`[OpenWebUIService] Found ${models.length} installed models`)
                  resolve(models)
                } catch (parseError) {
                  logger.error(
                    `[OpenWebUIService] Failed to parse ollama list output: ${
                      parseError instanceof Error ? parseError.message : parseError
                    }`
                  )
                  logger.debug(`[OpenWebUIService] Raw output: ${output}`)
                  resolve(null)
                }
              })
              stream.on('error', (streamErr) => {
                logger.error(
                  `[OpenWebUIService] Stream error during ollama list: ${
                    streamErr instanceof Error ? streamErr.message : streamErr
                  }`
                )
                resolve(null)
              })
            }
          )
        }
      )
    } catch (error) {
      logger.error(
        `[OpenWebUIService] Failed to get installed models: ${
          error instanceof Error ? error.message : error
        }`
      )
      resolve(null)
    }
  })
}
private async retrieveAndRefreshModels(
sort?: 'pulls' | 'name'
): Promise<NomadOllamaModel[] | null> {
try {
const cachedModels = await this.readModelsFromCache()
if (cachedModels) {
logger.info('[OpenWebUIService] Using cached available models data')
return this.sortModels(cachedModels, sort)
}
logger.info('[OpenWebUIService] Fetching fresh available models from API')
const response = await axios.get(NOMAD_MODELS_API_BASE_URL)
if (!response.data || !Array.isArray(response.data.models)) {
logger.warn(
`[OpenWebUIService] Invalid response format when fetching available models: ${JSON.stringify(response.data)}`
)
return null
}
const models = response.data.models as NomadOllamaModel[]
await this.writeModelsToCache(models)
return this.sortModels(models, sort)
} catch (error) {
logger.error(
`[OpenWebUIService] Failed to retrieve models from Nomad API: ${
error instanceof Error ? error.message : error
}`
)
return null
}
return `http://localhost:${location}`
}
/**
 * Load the cached model catalogue from disk, honoring the cache TTL.
 * Returns null when the cache is absent, stale, or not a JSON array —
 * callers then fall back to fetching fresh data.
 */
private async readModelsFromCache(): Promise<NomadOllamaModel[] | null> {
  try {
    const { mtimeMs } = await fs.stat(MODELS_CACHE_FILE)
    if (Date.now() - mtimeMs > CACHE_MAX_AGE_MS) {
      logger.info('[OpenWebUIService] Cache is stale, will fetch fresh data')
      return null
    }
    const raw = await fs.readFile(MODELS_CACHE_FILE, 'utf-8')
    const parsed = JSON.parse(raw) as NomadOllamaModel[]
    if (!Array.isArray(parsed)) {
      logger.warn('[OpenWebUIService] Invalid cache format, will fetch fresh data')
      return null
    }
    return parsed
  } catch (error) {
    // A missing cache file (ENOENT) is the normal cold-start case; only
    // other failures are worth surfacing in the logs.
    if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
      logger.warn(
        `[OpenWebUIService] Error reading cache: ${error instanceof Error ? error.message : error}`
      )
    }
    return null
  }
}
/**
 * Persist the model catalogue to the on-disk cache. Best-effort: any
 * failure is logged and swallowed so a cache problem never breaks a request.
 */
private async writeModelsToCache(models: NomadOllamaModel[]): Promise<void> {
  try {
    const cacheDir = path.dirname(MODELS_CACHE_FILE)
    await fs.mkdir(cacheDir, { recursive: true })
    const serialized = JSON.stringify(models, null, 2)
    await fs.writeFile(MODELS_CACHE_FILE, serialized, 'utf-8')
    logger.info('[OpenWebUIService] Successfully cached available models')
  } catch (error) {
    logger.warn(
      `[OpenWebUIService] Failed to write models cache: ${error instanceof Error ? error.message : error}`
    )
  }
}
/**
 * Sort models in place by the requested criterion and return the same array.
 *
 * - 'pulls': descending by estimated pull count (strings like "500", "1.2K", "4M", "1B").
 * - 'name': ascending, locale-aware.
 *
 * Each model's `tags` array is additionally sorted ascending by parsed size
 * (e.g. "512MB" before "2GB" before "75GB"), so the lightest variant comes first.
 * NOTE: mutates `models` (and each model's `tags`); callers use the returned
 * reference, so the in-place behavior is preserved deliberately.
 */
private sortModels(models: NomadOllamaModel[], sort?: 'pulls' | 'name'): NomadOllamaModel[] {
  // Convert a human-readable pull count ("1.2K", "4M") to a number.
  // Missing/unparseable values map to 0 so they sort last instead of crashing.
  const parsePulls = (pulls: string | undefined): number => {
    if (typeof pulls !== 'string') return 0
    const multiplier = pulls.endsWith('K')
      ? 1_000
      : pulls.endsWith('M')
        ? 1_000_000
        : pulls.endsWith('B')
          ? 1_000_000_000
          : 1
    const value = parseFloat(pulls) * multiplier
    return Number.isNaN(value) ? 0 : value
  }

  // Convert a size string ("512MB", "2GB") to gigabytes.
  // BUGFIX: the previous multipliers had KB (1/1_000) and MB (1/1_000_000)
  // swapped, which ranked megabyte-sized models below kilobyte-sized ones.
  const parseSize = (size: string | undefined): number => {
    if (typeof size !== 'string') return 0
    const multiplier = size.endsWith('KB')
      ? 1 / 1_000_000
      : size.endsWith('MB')
        ? 1 / 1_000
        : size.endsWith('GB')
          ? 1
          : size.endsWith('TB')
            ? 1_000
            : 0 // Unknown size format sorts first
    const value = parseFloat(size) * multiplier
    return Number.isNaN(value) ? 0 : value
  }

  if (sort === 'pulls') {
    models.sort((a, b) => parsePulls(b.estimated_pulls) - parsePulls(a.estimated_pulls))
  } else if (sort === 'name') {
    models.sort((a, b) => a.name.localeCompare(b.name))
  }

  // Always sort each model's tags ascending by size: smallest variant first.
  for (const model of models) {
    if (model.tags && Array.isArray(model.tags)) {
      model.tags.sort((a, b) => parseSize(a.size) - parseSize(b.size))
    }
  }
  return models
}
}

View File

@ -7,3 +7,9 @@ export const downloadJobsByFiletypeSchema = vine.compile(
}),
})
)
// Validates requests that target a single model by name (download / delete).
export const modelNameSchema = vine.compile(
vine.object({
// Ollama model identifier — presence only; format is not constrained here.
model: vine.string(),
})
)

View File

@ -0,0 +1,8 @@
import vine from '@vinejs/vine'
// Validates query parameters for listing available (installable) models.
export const getAvailableModelsSchema = vine.compile(
vine.object({
// Sort order for the catalogue; service-side default applies when omitted.
sort: vine.enum(['pulls', 'name'] as const).optional(),
// When true, only a short curated/recommended subset is returned.
recommendedOnly: vine.boolean().optional(),
})
)

View File

@ -60,7 +60,9 @@ export default class QueueWork extends BaseCommand {
const handlers = new Map<string, any>()
const { RunDownloadJob } = await import('#jobs/run_download_job')
const { DownloadModelJob } = await import('#jobs/download_model_job')
handlers.set(RunDownloadJob.key, new RunDownloadJob())
handlers.set(DownloadModelJob.key, new DownloadModelJob())
return handlers
}

View File

@ -1,7 +1,7 @@
import { capitalizeFirstLetter } from '~/lib/util'
import classNames from '~/lib/classNames'
import LoadingSpinner from '~/components/LoadingSpinner'
import React, { RefObject } from 'react'
import React, { RefObject, useState } from 'react'
export type StyledTableProps<T extends { [key: string]: any }> = {
loading?: boolean
@ -23,6 +23,12 @@ export type StyledTableProps<T extends { [key: string]: any }> = {
ref?: RefObject<HTMLDivElement | null>
containerProps?: React.HTMLAttributes<HTMLDivElement>
compact?: boolean
expandable?: {
expandedRowRender: (record: T, index: number) => React.ReactNode
defaultExpandedRowKeys?: (string | number)[]
onExpandedRowsChange?: (expandedKeys: (string | number)[]) => void
expandIconColumnIndex?: number
}
}
function StyledTable<T extends { [key: string]: any }>({
@ -40,11 +46,31 @@ function StyledTable<T extends { [key: string]: any }>({
containerProps = {},
rowLines = true,
compact = false,
expandable,
}: StyledTableProps<T>) {
const { className: tableClassName, ...restTableProps } = tableProps
const [expandedRowKeys, setExpandedRowKeys] = useState<(string | number)[]>(
expandable?.defaultExpandedRowKeys || []
)
const leftPadding = compact ? 'pl-2' : 'pl-4 sm:pl-6'
const isRowExpanded = (record: T, index: number) => {
const key = record.id ?? index
return expandedRowKeys.includes(key)
}
const toggleRowExpansion = (record: T, index: number, event: React.MouseEvent) => {
event.stopPropagation()
const key = record.id ?? index
const newExpandedKeys = expandedRowKeys.includes(key)
? expandedRowKeys.filter((k) => k !== key)
: [...expandedRowKeys, key]
setExpandedRowKeys(newExpandedKeys)
expandable?.onExpandedRowsChange?.(newExpandedKeys)
}
return (
<div
className={classNames(
@ -57,6 +83,14 @@ function StyledTable<T extends { [key: string]: any }>({
<table className="min-w-full overflow-auto" {...restTableProps}>
<thead className='border-b border-gray-200 '>
<tr>
{expandable && (
<th
className={classNames(
'whitespace-nowrap text-left font-semibold text-gray-900 w-12',
compact ? `${leftPadding} py-2` : `${leftPadding} py-4 pr-3`
)}
/>
)}
{columns.map((column, index) => (
<th
key={index}
@ -73,48 +107,90 @@ function StyledTable<T extends { [key: string]: any }>({
<tbody className={tableBodyClassName} style={tableBodyStyle}>
{!loading &&
data.length !== 0 &&
data.map((record, recordIdx) => (
<tr
data-index={'index' in record ? record.index : recordIdx}
key={record.id || recordIdx}
onClick={() => onRowClick?.(record)}
style={{
...tableRowStyle,
height: 'height' in record ? record.height : 'auto',
transform:
'translateY' in record ? 'translateY(' + record.transformY + 'px)' : undefined,
}}
className={classNames(
rowLines ? 'border-b border-gray-200' : '',
onRowClick ? `cursor-pointer hover:bg-gray-100 ` : ''
)}
>
{columns.map((column, index) => (
<td
key={index}
data.map((record, recordIdx) => {
const isExpanded = expandable && isRowExpanded(record, recordIdx)
return (
<React.Fragment key={record.id || recordIdx}>
<tr
data-index={'index' in record ? record.index : recordIdx}
onClick={() => onRowClick?.(record)}
style={{
...tableRowStyle,
height: 'height' in record ? record.height : 'auto',
transform:
'translateY' in record ? 'translateY(' + record.transformY + 'px)' : undefined,
}}
className={classNames(
'relative text-sm whitespace-nowrap max-w-72 truncate break-words text-left',
column.className || '',
compact ? `${leftPadding} py-2` : `${leftPadding} py-4 pr-3`
rowLines ? 'border-b border-gray-200' : '',
onRowClick ? `cursor-pointer hover:bg-gray-100 ` : ''
)}
>
{column.render
? column.render(record, index)
: (record[column.accessor] as React.ReactNode)}
</td>
))}
</tr>
))}
{expandable && (
<td
className={classNames(
'text-sm whitespace-nowrap text-left w-12',
compact ? `${leftPadding} py-2` : `${leftPadding} py-4 pr-3`
)}
onClick={(e) => toggleRowExpansion(record, recordIdx, e)}
>
<button
className="text-gray-500 hover:text-gray-700 focus:outline-none"
aria-label={isExpanded ? 'Collapse row' : 'Expand row'}
>
<svg
className={classNames(
'w-5 h-5 transition-transform',
isExpanded ? 'rotate-90' : ''
)}
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M9 5l7 7-7 7"
/>
</svg>
</button>
</td>
)}
{columns.map((column, index) => (
<td
key={index}
className={classNames(
'relative text-sm whitespace-nowrap max-w-72 truncate break-words text-left',
column.className || '',
compact ? `${leftPadding} py-2` : `${leftPadding} py-4 pr-3`
)}
>
{column.render
? column.render(record, index)
: (record[column.accessor] as React.ReactNode)}
</td>
))}
</tr>
{expandable && isExpanded && (
<tr className="bg-gray-50">
<td colSpan={columns.length + 1}>
{expandable.expandedRowRender(record, recordIdx)}
</td>
</tr>
)}
</React.Fragment>
)
})}
{!loading && data.length === 0 && (
<tr>
<td colSpan={columns.length} className="!text-center py-8 text-gray-500">
<td colSpan={columns.length + (expandable ? 1 : 0)} className="!text-center py-8 text-gray-500">
{noDataText}
</td>
</tr>
)}
{loading && (
<tr className="!h-16">
<td colSpan={columns.length} className="!text-center">
<td colSpan={columns.length + (expandable ? 1 : 0)} className="!text-center">
<LoadingSpinner fullscreen={false} />
</td>
</tr>

View File

@ -3,8 +3,13 @@ import { ListRemoteZimFilesResponse, ListZimFilesResponse } from '../../types/zi
import { ServiceSlim } from '../../types/services'
import { FileEntry } from '../../types/files'
import { SystemInformationResponse, SystemUpdateStatus } from '../../types/system'
import { CuratedCategory, CuratedCollectionWithStatus, DownloadJobWithProgress } from '../../types/downloads'
import {
CuratedCategory,
CuratedCollectionWithStatus,
DownloadJobWithProgress,
} from '../../types/downloads'
import { catchInternal } from './util'
import { NomadOllamaModel } from '../../types/ollama'
class API {
private client: AxiosInstance
@ -28,6 +33,13 @@ class API {
})()
}
/**
 * Deletes an installed Open WebUI model by name.
 * @param model - the model tag to remove (e.g. "llama3:8b")
 * @returns the backend's success flag and human-readable message
 */
async deleteModel(model: string): Promise<{ success: boolean; message: string }> {
  const run = catchInternal(async () => {
    const res = await this.client.post<{ success: boolean; message: string }>(
      '/openwebui/delete-model',
      { model }
    )
    return res.data
  })
  return run()
}
async downloadBaseMapAssets() {
return catchInternal(async () => {
const response = await this.client.post<{ success: boolean }>('/maps/download-base-assets')
@ -46,6 +58,13 @@ class API {
})()
}
/**
 * Requests a download of the given model on the Open WebUI backend.
 * @param model - the model tag to pull (e.g. "llama3:8b")
 * @returns the backend's success flag and human-readable message
 */
async downloadModel(model: string): Promise<{ success: boolean; message: string }> {
  type DownloadResponse = { success: boolean; message: string }
  return catchInternal(async () => {
    const res = await this.client.post<DownloadResponse>('/openwebui/download-model', { model })
    return res.data
  })()
}
async downloadZimCollection(slug: string): Promise<{
message: string
slug: string
@ -109,6 +128,15 @@ class API {
})()
}
async getRecommendedModels(): Promise<NomadOllamaModel[] | undefined> {
return catchInternal(async () => {
const response = await this.client.get<NomadOllamaModel[]>('/openwebui/models', {
params: { sort: 'pulls', recommendedOnly: true },
})
return response.data
})()
}
async getSystemInfo() {
return catchInternal(async () => {
const response = await this.client.get<SystemInformationResponse>('/system/info')
@ -169,9 +197,7 @@ class API {
async listCuratedCategories() {
return catchInternal(async () => {
const response = await this.client.get<CuratedCategory[]>(
'/easy-setup/curated-categories'
)
const response = await this.client.get<CuratedCategory[]>('/easy-setup/curated-categories')
return response.data
})()
}

View File

@ -34,7 +34,8 @@ const CORE_CAPABILITIES: Capability[] = [
id: 'information',
name: 'Information Library',
technicalName: 'Kiwix',
description: 'Offline access to Wikipedia, medical references, how-to guides, and encyclopedias',
description:
'Offline access to Wikipedia, medical references, how-to guides, and encyclopedias',
features: [
'Complete Wikipedia offline',
'Medical references and first aid guides',
@ -80,11 +81,7 @@ const ADDITIONAL_TOOLS: Capability[] = [
name: 'Notes',
technicalName: 'FlatNotes',
description: 'Simple note-taking app with local storage',
features: [
'Markdown support',
'All notes stored locally',
'No account required',
],
features: ['Markdown support', 'All notes stored locally', 'No account required'],
services: ['nomad_flatnotes'],
icon: 'IconNotes',
},
@ -110,7 +107,10 @@ const CURATED_ZIM_COLLECTIONS_KEY = 'curated-zim-collections'
const CURATED_CATEGORIES_KEY = 'curated-categories'
// Helper to get all resources for a tier (including inherited resources)
const getAllResourcesForTier = (tier: CategoryTier, allTiers: CategoryTier[]): CategoryResource[] => {
const getAllResourcesForTier = (
tier: CategoryTier,
allTiers: CategoryTier[]
): CategoryResource[] => {
const resources = [...tier.resources]
if (tier.includesTier) {
const includedTier = allTiers.find((t) => t.slug === tier.includesTier)
@ -126,6 +126,7 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const [selectedServices, setSelectedServices] = useState<string[]>([])
const [selectedMapCollections, setSelectedMapCollections] = useState<string[]>([])
const [selectedZimCollections, setSelectedZimCollections] = useState<string[]>([])
const [selectedAiModels, setSelectedAiModels] = useState<string[]>([])
const [isProcessing, setIsProcessing] = useState(false)
const [showAdditionalTools, setShowAdditionalTools] = useState(false)
@ -143,7 +144,8 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
selectedServices.length > 0 ||
selectedMapCollections.length > 0 ||
selectedZimCollections.length > 0 ||
selectedTiers.size > 0
selectedTiers.size > 0 ||
selectedAiModels.length > 0
const { data: mapCollections, isLoading: isLoadingMaps } = useQuery({
queryKey: [CURATED_MAP_COLLECTIONS_KEY],
@ -164,6 +166,12 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
refetchOnWindowFocus: false,
})
const { data: recommendedModels, isLoading: isLoadingRecommendedModels } = useQuery({
queryKey: ['recommended-ollama-models'],
queryFn: () => api.getRecommendedModels(),
refetchOnWindowFocus: false,
})
// All services for display purposes
const allServices = props.system.services
@ -172,9 +180,7 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
)
// Services that are already installed
const installedServices = props.system.services.filter(
(service) => service.installed
)
const installedServices = props.system.services.filter((service) => service.installed)
const toggleMapCollection = (slug: string) => {
setSelectedMapCollections((prev) =>
@ -188,6 +194,12 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
)
}
// Toggles a model in the AI-model selection: removes it when already
// selected, otherwise appends it to the end of the list.
const toggleAiModel = (modelName: string) => {
  setSelectedAiModels((current) => {
    if (current.includes(modelName)) {
      return current.filter((name) => name !== modelName)
    }
    return [...current, modelName]
  })
}
// Category/tier handlers
const handleCategoryClick = (category: CuratedCategory) => {
if (!isOnline) return
@ -255,7 +267,14 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
}
return totalBytes
}, [selectedTiers, selectedMapCollections, selectedZimCollections, categories, mapCollections, zimCollections])
}, [
selectedTiers,
selectedMapCollections,
selectedZimCollections,
categories,
mapCollections,
zimCollections,
])
// Get primary disk/filesystem info for storage projection
// Try disk array first (Linux/production), fall back to fsSize (Windows/dev)
@ -264,8 +283,8 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const storageInfo = primaryDisk
? { totalSize: primaryDisk.totalSize, totalUsed: primaryDisk.totalUsed }
: primaryFs
? { totalSize: primaryFs.size, totalUsed: primaryFs.used }
: null
? { totalSize: primaryFs.size, totalUsed: primaryFs.used }
: null
const canProceedToNextStep = () => {
if (!isOnline) return false // Must be online to proceed
@ -304,12 +323,13 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
await Promise.all(installPromises)
// Download collections and individual tier resources
// Download collections, individual tier resources, and AI models
const tierResources = getSelectedTierResources()
const downloadPromises = [
...selectedMapCollections.map((slug) => api.downloadMapCollection(slug)),
...selectedZimCollections.map((slug) => api.downloadZimCollection(slug)),
...tierResources.map((resource) => api.downloadRemoteZimFile(resource.url)),
...selectedAiModels.map((modelName) => api.downloadModel(modelName)),
]
await Promise.all(downloadPromises)
@ -469,9 +489,7 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const isSelected = isCapabilitySelected(capability)
if (isSelected) {
// Deselect all services in this capability
setSelectedServices((prev) =>
prev.filter((s) => !capability.services.includes(s))
)
setSelectedServices((prev) => prev.filter((s) => !capability.services.includes(s)))
} else {
// Select all available services in this capability
const servicesToAdd = capability.services.filter((service) =>
@ -538,10 +556,26 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
{capability.description}
</p>
{isCore && (
<ul className={classNames('mt-3 space-y-1', installed ? 'text-gray-600' : selected ? 'text-white' : 'text-gray-600')}>
<ul
className={classNames(
'mt-3 space-y-1',
installed ? 'text-gray-600' : selected ? 'text-white' : 'text-gray-600'
)}
>
{capability.features.map((feature, idx) => (
<li key={idx} className="flex items-start text-sm">
<span className={classNames('mr-2', installed ? 'text-desert-green' : selected ? 'text-white' : 'text-desert-green')}></span>
<span
className={classNames(
'mr-2',
installed
? 'text-desert-green'
: selected
? 'text-white'
: 'text-desert-green'
)}
>
</span>
{feature}
</li>
))}
@ -558,7 +592,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
: 'border-desert-stone'
)}
>
{isChecked && <IconCheck size={20} className={installed ? 'text-white' : 'text-desert-green'} />}
{isChecked && (
<IconCheck size={20} className={installed ? 'text-white' : 'text-desert-green'} />
)}
</div>
</div>
</div>
@ -573,8 +609,8 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
// Check if ALL capabilities are already installed (nothing left to install)
const allCoreInstalled = existingCoreCapabilities.every(isCapabilityInstalled)
const allAdditionalInstalled = existingAdditionalTools.every(isCapabilityInstalled)
const allInstalled = allCoreInstalled && allAdditionalInstalled &&
existingCoreCapabilities.length > 0
const allInstalled =
allCoreInstalled && allAdditionalInstalled && existingCoreCapabilities.length > 0
return (
<div className="space-y-8">
@ -587,7 +623,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
{allInstalled ? (
<div className="text-center py-12">
<p className="text-gray-600 text-lg">All available capabilities are already installed!</p>
<p className="text-gray-600 text-lg">
All available capabilities are already installed!
</p>
<StyledButton
variant="primary"
className="mt-4"
@ -603,7 +641,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
<div>
<h3 className="text-lg font-semibold text-gray-700 mb-4">Core Capabilities</h3>
<div className="grid grid-cols-1 lg:grid-cols-3 gap-4">
{existingCoreCapabilities.map((capability) => renderCapabilityCard(capability, true))}
{existingCoreCapabilities.map((capability) =>
renderCapabilityCard(capability, true)
)}
</div>
</div>
)}
@ -624,7 +664,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
</button>
{showAdditionalTools && (
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 mt-4">
{existingAdditionalTools.map((capability) => renderCapabilityCard(capability, false))}
{existingAdditionalTools.map((capability) =>
renderCapabilityCard(capability, false)
)}
</div>
)}
</div>
@ -681,92 +723,210 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
</div>
)
const renderStep3 = () => (
<div className="space-y-6">
<div className="text-center mb-6">
<h2 className="text-3xl font-bold text-gray-900 mb-2">Choose Content Collections</h2>
<p className="text-gray-600">
Select content categories for offline knowledge. Click a category to choose your preferred tier based on storage capacity.
</p>
</div>
const renderStep3 = () => {
// Check if AI or Information capabilities are selected OR already installed
const isAiSelected = selectedServices.includes('nomad_open_webui') ||
installedServices.some((s) => s.service_name === 'nomad_open_webui')
const isInformationSelected = selectedServices.includes('nomad_kiwix_serve') ||
installedServices.some((s) => s.service_name === 'nomad_kiwix_serve')
{/* Curated Categories with Tiers */}
{isLoadingCategories ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
return (
<div className="space-y-6">
<div className="text-center mb-6">
<h2 className="text-3xl font-bold text-gray-900 mb-2">Choose Content</h2>
<p className="text-gray-600">
{isAiSelected && isInformationSelected
? 'Select AI models and content categories for offline use.'
: isAiSelected
? 'Select AI models to download for offline use.'
: isInformationSelected
? 'Select content categories for offline knowledge.'
: 'Configure content for your selected capabilities.'}
</p>
</div>
) : categories && categories.length > 0 ? (
<>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
{categories.map((category) => (
<CategoryCard
key={category.slug}
category={category}
selectedTier={selectedTiers.get(category.slug) || null}
onClick={handleCategoryClick}
/>
))}
</div>
{/* Tier Selection Modal */}
<TierSelectionModal
isOpen={tierModalOpen}
onClose={closeTierModal}
category={activeCategory}
selectedTierSlug={activeCategory ? selectedTiers.get(activeCategory.slug)?.slug : null}
onSelectTier={handleTierSelect}
/>
</>
) : null}
{/* Legacy flat collections - show if available and no categories */}
{(!categories || categories.length === 0) && (
<>
{isLoadingZims ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
{/* AI Model Selection - Only show if AI capability is selected */}
{isAiSelected && (
<div className="mb-8">
<div className="mb-4">
<h3 className="text-2xl font-semibold text-gray-900 mb-2">Choose AI Models</h3>
<p className="text-gray-600">
Select AI models to download. We've recommended some smaller, popular models to get you started. You'll need at least one to use AI features, but you can always add more later.
</p>
</div>
) : zimCollections && zimCollections.length > 0 ? (
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
{zimCollections.map((collection) => (
<div
key={collection.slug}
onClick={() =>
isOnline && !collection.all_downloaded && toggleZimCollection(collection.slug)
}
className={classNames(
'relative',
selectedZimCollections.includes(collection.slug) &&
'ring-4 ring-desert-green rounded-lg',
collection.all_downloaded && 'opacity-75',
!isOnline && 'opacity-50 cursor-not-allowed'
)}
>
<CuratedCollectionCard collection={collection} size="large" />
{selectedZimCollections.includes(collection.slug) && (
<div className="absolute top-2 right-2 bg-desert-green rounded-full p-1">
<IconCheck size={32} className="text-white" />
{isLoadingRecommendedModels ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
</div>
) : recommendedModels && recommendedModels.length > 0 ? (
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
{recommendedModels.map((model) => (
<div
key={model.name}
onClick={() => isOnline && toggleAiModel(model.name)}
className={classNames(
'p-4 rounded-lg border-2 transition-all cursor-pointer',
selectedAiModels.includes(model.name)
? 'border-desert-green bg-desert-green shadow-md'
: 'border-desert-stone-light bg-white hover:border-desert-green hover:shadow-sm',
!isOnline && 'opacity-50 cursor-not-allowed'
)}
>
<div className="flex items-start justify-between">
<div className="flex-1">
<h4
className={classNames(
'text-lg font-semibold mb-1',
selectedAiModels.includes(model.name) ? 'text-white' : 'text-gray-900'
)}
>
{model.name}
</h4>
<p
className={classNames(
'text-sm mb-2',
selectedAiModels.includes(model.name) ? 'text-white' : 'text-gray-600'
)}
>
{model.description}
</p>
{model.tags?.[0]?.size && (
<div
className={classNames(
'text-xs',
selectedAiModels.includes(model.name)
? 'text-green-100'
: 'text-gray-500'
)}
>
Size: {model.tags[0].size}
</div>
)}
</div>
<div
className={classNames(
'ml-4 w-6 h-6 rounded-full border-2 flex items-center justify-center transition-all flex-shrink-0',
selectedAiModels.includes(model.name)
? 'border-white bg-white'
: 'border-desert-stone'
)}
>
{selectedAiModels.includes(model.name) && (
<IconCheck size={16} className="text-desert-green" />
)}
</div>
</div>
)}
</div>
))}
</div>
) : (
<div className="text-center py-8 bg-gray-50 rounded-lg">
<p className="text-gray-600">No recommended AI models available at this time.</p>
</div>
)}
</div>
)}
{/* Curated Categories with Tiers - Only show if Information capability is selected */}
{isInformationSelected && (
<>
{isLoadingCategories ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
</div>
) : categories && categories.length > 0 ? (
<>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
{categories.map((category) => (
<CategoryCard
key={category.slug}
category={category}
selectedTier={selectedTiers.get(category.slug) || null}
onClick={handleCategoryClick}
/>
))}
</div>
))}
</div>
) : (
<div className="text-center py-12">
<p className="text-gray-600 text-lg">No content collections available at this time.</p>
</div>
)}
</>
)}
</div>
)
{/* Tier Selection Modal */}
<TierSelectionModal
isOpen={tierModalOpen}
onClose={closeTierModal}
category={activeCategory}
selectedTierSlug={
activeCategory ? selectedTiers.get(activeCategory.slug)?.slug : null
}
onSelectTier={handleTierSelect}
/>
</>
) : null}
{/* Legacy flat collections - show if available and no categories */}
{(!categories || categories.length === 0) && (
<>
{isLoadingZims ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
</div>
) : zimCollections && zimCollections.length > 0 ? (
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
{zimCollections.map((collection) => (
<div
key={collection.slug}
onClick={() =>
isOnline &&
!collection.all_downloaded &&
toggleZimCollection(collection.slug)
}
className={classNames(
'relative',
selectedZimCollections.includes(collection.slug) &&
'ring-4 ring-desert-green rounded-lg',
collection.all_downloaded && 'opacity-75',
!isOnline && 'opacity-50 cursor-not-allowed'
)}
>
<CuratedCollectionCard collection={collection} size="large" />
{selectedZimCollections.includes(collection.slug) && (
<div className="absolute top-2 right-2 bg-desert-green rounded-full p-1">
<IconCheck size={32} className="text-white" />
</div>
)}
</div>
))}
</div>
) : (
<div className="text-center py-12">
<p className="text-gray-600 text-lg">
No content collections available at this time.
</p>
</div>
)}
</>
)}
</>
)}
{/* Show message if no capabilities requiring content are selected */}
{!isAiSelected && !isInformationSelected && (
<div className="text-center py-12">
<p className="text-gray-600 text-lg">
No content-based capabilities selected. You can skip this step or go back to select
capabilities that require content.
</p>
</div>
)}
</div>
)
}
const renderStep4 = () => {
const hasSelections =
selectedServices.length > 0 ||
selectedMapCollections.length > 0 ||
selectedZimCollections.length > 0 ||
selectedTiers.size > 0
selectedTiers.size > 0 ||
selectedAiModels.length > 0
return (
<div className="space-y-6">
@ -797,7 +957,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
<IconCheck size={20} className="text-desert-green mr-2" />
<span className="text-gray-700">
{capability.name}
<span className="text-gray-400 text-sm ml-2">({capability.technicalName})</span>
<span className="text-gray-400 text-sm ml-2">
({capability.technicalName})
</span>
</span>
</li>
))}
@ -876,6 +1038,30 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
</div>
)}
{selectedAiModels.length > 0 && (
<div className="bg-white rounded-lg border-2 border-desert-stone-light p-6">
<h3 className="text-xl font-semibold text-gray-900 mb-4">
AI Models to Download ({selectedAiModels.length})
</h3>
<ul className="space-y-2">
{selectedAiModels.map((modelName) => {
const model = recommendedModels?.find((m) => m.name === modelName)
return (
<li key={modelName} className="flex items-center justify-between">
<div className="flex items-center">
<IconCheck size={20} className="text-desert-green mr-2" />
<span className="text-gray-700">{modelName}</span>
</div>
{model?.tags?.[0]?.size && (
<span className="text-gray-500 text-sm">{model.tags[0].size}</span>
)}
</li>
)
})}
</ul>
</div>
)}
<Alert
title="Ready to Start"
message="Click 'Complete Setup' to begin installing apps and downloading content. This may take some time depending on your internet connection and the size of the downloads."
@ -937,10 +1123,11 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
cap.services.some((s) => selectedServices.includes(s))
).length
return `${count} ${count === 1 ? 'capability' : 'capabilities'}`
})()},{' '}
{selectedMapCollections.length} map region
{selectedMapCollections.length !== 1 && 's'}, {selectedZimCollections.length} content
pack{selectedZimCollections.length !== 1 && 's'} selected
})()}
, {selectedMapCollections.length} map region
{selectedMapCollections.length !== 1 && 's'}, {selectedZimCollections.length}{' '}
content pack{selectedZimCollections.length !== 1 && 's'},{' '}
{selectedAiModels.length} AI model{selectedAiModels.length !== 1 && 's'} selected
</p>
</div>

View File

@ -1,64 +1,198 @@
import { Head } from '@inertiajs/react'
import { Head, router } from '@inertiajs/react'
import StyledTable from '~/components/StyledTable'
import SettingsLayout from '~/layouts/SettingsLayout'
import { ServiceSlim } from '../../../types/services'
import { getServiceLink } from '~/lib/navigation'
import LoadingSpinner from '~/components/LoadingSpinner'
import { IconCheck } from '@tabler/icons-react'
import { useState } from 'react'
import { NomadOllamaModel, OllamaModelListing } from '../../../types/ollama'
import StyledButton from '~/components/StyledButton'
import useServiceInstalledStatus from '~/hooks/useServiceInstalledStatus'
import Alert from '~/components/Alert'
import { useNotifications } from '~/context/NotificationContext'
import api from '~/lib/api'
import { useModals } from '~/context/ModalContext'
import StyledModal from '~/components/StyledModal'
export default function ModelsPage(props: {
models: { availableModels: NomadOllamaModel[]; installedModels: OllamaModelListing[] }
}) {
const { isInstalled } = useServiceInstalledStatus('nomad_openwebui')
const { addNotification } = useNotifications()
const { openModal, closeAllModals } = useModals()
// Kicks off a background download for the given model and notifies the
// user that it was queued (the actual pull completes asynchronously on
// the server's model-downloads queue).
// NOTE(review): when res.success is false no notification is shown at
// all — confirm whether the backend can return { success: false }
// without throwing, and whether that case should surface an error toast.
async function handleInstallModel(modelName: string) {
  try {
    const res = await api.downloadModel(modelName)
    if (res.success) {
      addNotification({
        message: `Model download initiated for ${modelName}. It may take some time to complete.`,
        type: 'success',
      })
    }
  } catch (error) {
    // Log for diagnostics, then show a user-facing error toast.
    console.error('Error installing model:', error)
    addNotification({
      message: `There was an error installing the model: ${modelName}. Please try again.`,
      type: 'error',
    })
  }
}
// Deletes an installed model, notifies the user, and reloads the page
// data so the installed-models list reflects the change.
async function handleDeleteModel(modelName: string) {
  try {
    const res = await api.deleteModel(modelName)
    if (res.success) {
      addNotification({
        message: `Model deleted: ${modelName}.`,
        type: 'success',
      })
    }
    // Refresh Inertia props so the table's installed state updates.
    router.reload()
  } catch (error) {
    console.error('Error deleting model:', error)
    addNotification({
      message: `There was an error deleting the model: ${modelName}. Please try again.`,
      type: 'error',
    })
  } finally {
    // Always dismiss the confirmation dialog. Previously this ran only on
    // the non-throwing path, so a failed delete left the modal open with
    // the error toast hidden behind it.
    closeAllModals()
  }
}
// Opens a confirmation dialog before deleting a model; the actual delete
// is performed by handleDeleteModel once the user confirms.
// NOTE(review): declared async but contains no await — callers ignore the
// returned promise, so this could be a plain function.
async function confirmDeleteModel(model: string) {
  openModal(
    <StyledModal
      title="Delete Model?"
      onConfirm={() => {
        handleDeleteModel(model)
      }}
      onCancel={closeAllModals}
      open={true}
      confirmText="Delete"
      cancelText="Cancel"
      confirmVariant="primary"
    >
      <p className="text-gray-700">
        Are you sure you want to delete this model? You will need to download it again if you want
        to use it in the future.
      </p>
    </StyledModal>,
    'confirm-delete-model-modal'
  )
}
export default function ModelsPage(props: { models: { installedModels: string[] } }) {
const [loading, setLoading] = useState(false)
return (
<SettingsLayout>
<Head title="App Settings" />
<div className="xl:pl-72 w-full">
<main className="px-12 py-6">
<h1 className="text-4xl font-semibold mb-4">Models</h1>
<p className="text-gray-500 mb-4">Easily manage the AI models available for Open WebUI</p>
{loading && <LoadingSpinner fullscreen />}
{!loading && (
<StyledTable<ServiceSlim & { actions?: any }>
className="font-semibold"
rowLines={true}
columns={[
{
accessor: 'friendly_name',
title: 'Name',
render(record) {
return (
<div className="flex flex-col">
<p>{record.friendly_name || record.service_name}</p>
<p className="text-sm text-gray-500">{record.description}</p>
</div>
)
},
},
{
accessor: 'ui_location',
title: 'Port',
render: (record) => (
<a
href={getServiceLink(record.ui_location || 'unknown')}
target="_blank"
rel="noopener noreferrer"
className="text-desert-green hover:underline font-semibold"
>
{record.ui_location}
</a>
),
},
{
accessor: 'installed',
title: 'Installed',
render: (record) =>
record.installed ? <IconCheck className="h-6 w-6 text-desert-green" /> : '',
},
]}
data={[]}
<p className="text-gray-500 mb-4">
Easily manage the AI models available for Open WebUI. We recommend starting with smaller
models first to see how they perform on your system before moving on to larger ones.
</p>
{!isInstalled && (
<Alert
title="The Open WebUI service is not installed. Please install it to manage AI models."
type="warning"
variant="solid"
className="!mt-6"
/>
)}
<StyledTable<NomadOllamaModel>
className="font-semibold mt-8"
rowLines={true}
columns={[
{
accessor: 'name',
title: 'Name',
render(record) {
return (
<div className="flex flex-col">
<p className="text-lg font-semibold">{record.name}</p>
<p className="text-sm text-gray-500">{record.description}</p>
</div>
)
},
},
{
accessor: 'estimated_pulls',
title: 'Estimated Pulls',
},
{
accessor: 'model_last_updated',
title: 'Last Updated',
},
]}
data={props.models.availableModels || []}
expandable={{
expandedRowRender: (record) => (
<div className="pl-14">
<div className="bg-white overflow-hidden">
<table className="min-w-full divide-y divide-gray-200">
<thead className="bg-white">
<tr>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">
Tag
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">
Input Type
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">
Context Size
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">
Model Size
</th>
<th className="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">
Action
</th>
</tr>
</thead>
<tbody className="bg-white divide-y divide-gray-200">
{record.tags.map((tag, tagIndex) => {
const isInstalled = props.models.installedModels.some(
(mod) => mod.name === tag.name
)
return (
<tr key={tagIndex} className="hover:bg-slate-50">
<td className="px-6 py-4 whitespace-nowrap">
<span className="text-sm font-medium text-gray-900">
{tag.name}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="text-sm text-gray-600">{tag.input || 'N/A'}</span>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="text-sm text-gray-600">
{tag.context || 'N/A'}
</span>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<span className="text-sm text-gray-600">{tag.size || 'N/A'}</span>
</td>
<td className="px-6 py-4 whitespace-nowrap">
<StyledButton
variant={isInstalled ? 'danger' : 'primary'}
onClick={() => {
if (!isInstalled) {
handleInstallModel(tag.name)
} else {
confirmDeleteModel(tag.name)
}
}}
icon={isInstalled ? 'TrashIcon' : 'ArrowDownTrayIcon'}
>
{isInstalled ? 'Delete' : 'Install'}
</StyledButton>
</td>
</tr>
)
})}
</tbody>
</table>
</div>
</div>
),
}}
/>
</main>
</div>
</SettingsLayout>

View File

@ -8,13 +8,13 @@
"scripts": {
"start": "node bin/server.js",
"build": "node ace build",
"postinstall": "playwright install chromium --with-deps",
"dev": "node ace serve --hmr",
"test": "node ace test",
"lint": "eslint .",
"format": "prettier --write .",
"typecheck": "tsc --noEmit",
"work:downloads": "node ace queue:work --queue=downloads"
"work:downloads": "node ace queue:work --queue=downloads",
"work:model-downloads": "node ace queue:work --queue=model-downloads"
},
"imports": {
"#controllers/*": "./app/controllers/*.js",
@ -92,7 +92,6 @@
"maplibre-gl": "^4.7.1",
"mysql2": "^3.14.1",
"pino-pretty": "^13.0.0",
"playwright": "^1.57.0",
"pmtiles": "^4.3.0",
"postcss": "^8.5.6",
"react": "^19.1.0",

View File

@ -11,6 +11,7 @@ import DownloadsController from '#controllers/downloads_controller'
import EasySetupController from '#controllers/easy_setup_controller'
import HomeController from '#controllers/home_controller'
import MapsController from '#controllers/maps_controller'
import OpenWebUIController from '#controllers/openwebui_controller'
import SettingsController from '#controllers/settings_controller'
import SystemController from '#controllers/system_controller'
import ZimController from '#controllers/zim_controller'
@ -86,6 +87,15 @@ router.get('/api/health', () => {
return { status: 'ok' }
})
// Open WebUI model-management endpoints, mounted under /api/openwebui.
router
  .group(() => {
    // List models available for download (supports sort/recommendedOnly filters).
    router.get('/models', [OpenWebUIController, 'models'])
    // List models currently installed on the backend.
    router.get('/installed-models', [OpenWebUIController, 'installedModels'])
    // Dispatch a model download (handled asynchronously — see dispatchModelDownload).
    router.post('/download-model', [OpenWebUIController, 'dispatchModelDownload'])
    // Remove an installed model by name.
    router.post('/delete-model', [OpenWebUIController, 'deleteModel'])
  })
  .prefix('/api/openwebui')
router
.group(() => {
router.get('/info', [SystemController, 'getSystemInfo'])

29
admin/types/ollama.ts Normal file
View File

@ -0,0 +1,29 @@
/** A model entry as served by the catalog behind /openwebui/models. */
export type NomadOllamaModel = {
  id: string
  name: string
  description: string
  // NOTE(review): string, not number — presumably a formatted count like
  // "1.2M"; confirm against the upstream API before parsing.
  estimated_pulls: string
  model_last_updated: string
  first_seen: string
  // Per-tag variants (size/context/input) of this model.
  tags: NomadOllamaModelTag[]
}
/** A single downloadable variant (tag) of a model. */
export type NomadOllamaModelTag = {
  name: string
  size: string
  context: string
  input: string
}
/** Envelope returned by the models catalog endpoint. */
export type NomadOllamaModelAPIResponse = {
  success: boolean
  message: string
  models: NomadOllamaModel[]
}
/** An installed model as reported by the Ollama backend's listing. */
export type OllamaModelListing = {
  name: string
  id: string
  size: string
  modified: string
}

View File

@ -17,9 +17,10 @@ node ace migration:run --force
echo "Seeding the database..."
node ace db:seed
# Start background worker for queues
echo "Starting background worker for queues..."
# Start background workers for queues
echo "Starting background workers for queues..."
node ace queue:work --queue=downloads &
node ace queue:work --queue=model-downloads &
# Start the AdonisJS application
echo "Starting AdonisJS application..."