fix: remove Open WebUI

This commit is contained in:
Jake Turner 2026-02-01 04:37:14 +00:00 committed by Jake Turner
parent 0da050c5a3
commit adf76d272e
8 changed files with 2032 additions and 2115 deletions

View File

@ -14,7 +14,6 @@ export class DockerService {
public static KIWIX_SERVICE_NAME = 'nomad_kiwix_serve'
public static OLLAMA_SERVICE_NAME = 'nomad_ollama'
public static QDRANT_SERVICE_NAME = 'nomad_qdrant'
public static OPEN_WEBUI_SERVICE_NAME = 'nomad_open_webui'
public static CYBERCHEF_SERVICE_NAME = 'nomad_cyberchef'
public static FLATNOTES_SERVICE_NAME = 'nomad_flatnotes'
public static KOLIBRI_SERVICE_NAME = 'nomad_kolibri'

View File

@ -26,10 +26,10 @@ export default class extends BaseSchema {
await this.db.rawQuery(`
UPDATE services SET
friendly_name = 'AI Assistant',
powered_by = 'Open WebUI + Ollama',
powered_by = 'Ollama',
display_order = 3,
description = 'Local AI chat that runs entirely on your hardware - no internet required'
WHERE service_name = 'nomad_open_webui'
WHERE service_name = 'nomad_ollama'
`)
await this.db.rawQuery(`
@ -49,12 +49,6 @@ export default class extends BaseSchema {
description = 'Swiss Army knife for data encoding, encryption, and analysis'
WHERE service_name = 'nomad_cyberchef'
`)
await this.db.rawQuery(`
UPDATE services SET
display_order = 100
WHERE service_name = 'nomad_ollama'
`)
}
async down() {
@ -79,11 +73,11 @@ export default class extends BaseSchema {
await this.db.rawQuery(`
UPDATE services SET
friendly_name = 'Open WebUI',
friendly_name = 'Ollama',
powered_by = NULL,
display_order = NULL,
description = 'A web interface for interacting with local AI models served by Ollama'
WHERE service_name = 'nomad_open_webui'
description = 'Local AI chat that runs entirely on your hardware - no internet required'
WHERE service_name = 'nomad_ollama'
`)
await this.db.rawQuery(`
@ -103,11 +97,5 @@ export default class extends BaseSchema {
description = 'The Cyber Swiss Army Knife - a web app for encryption, encoding, and data analysis'
WHERE service_name = 'nomad_cyberchef'
`)
await this.db.rawQuery(`
UPDATE services SET
display_order = NULL
WHERE service_name = 'nomad_ollama'
`)
}
}

View File

@ -3,7 +3,6 @@ import { DockerService } from '#services/docker_service'
import { BaseSeeder } from '@adonisjs/lucid/seeders'
import { ModelAttributes } from '@adonisjs/lucid/types/model'
import env from '#start/env'
import { RagService } from '#services/rag_service'
export default class ServiceSeeder extends BaseSeeder {
// Use environment variable with fallback to production default
@ -64,11 +63,11 @@ export default class ServiceSeeder extends BaseSeeder {
},
{
service_name: DockerService.OLLAMA_SERVICE_NAME,
friendly_name: 'Ollama',
powered_by: null,
display_order: 100, // Dependency service, not shown directly
description: 'Run local LLMs (AI models) with ease on your own hardware',
icon: 'IconRobot',
friendly_name: 'AI Assistant',
powered_by: 'Ollama',
display_order: 3,
description: 'Local AI chat that runs entirely on your hardware - no internet required',
icon: 'IconWand',
container_image: 'ollama/ollama:latest',
container_command: 'serve',
container_config: JSON.stringify({
@ -82,40 +81,8 @@ export default class ServiceSeeder extends BaseSeeder {
ui_location: null,
installed: false,
installation_status: 'idle',
is_dependency_service: true,
depends_on: DockerService.QDRANT_SERVICE_NAME,
},
{
service_name: DockerService.OPEN_WEBUI_SERVICE_NAME,
friendly_name: 'AI Assistant',
powered_by: 'Open WebUI + Ollama',
display_order: 3,
description: 'Local AI chat that runs entirely on your hardware - no internet required',
icon: 'IconWand',
container_image: 'ghcr.io/open-webui/open-webui:main',
container_command: null,
container_config: JSON.stringify({
HostConfig: {
RestartPolicy: { Name: 'unless-stopped' },
NetworkMode: 'host',
Binds: [`${ServiceSeeder.NOMAD_STORAGE_ABS_PATH}/open-webui:/app/backend/data`],
PortBindings: { '8080/tcp': [{ HostPort: '3000' }] },
},
Env: [
'WEBUI_AUTH=False',
'PORT=3000',
'OLLAMA_BASE_URL=http://127.0.0.1:11434',
'VECTOR_DB=qdrant',
'QDRANT_URI=http://127.0.0.1:6333',
'RAG_EMBEDDING_ENGINE=ollama',
`RAG_EMBEDDING_MODEL=${RagService.EMBEDDING_MODEL}`,
],
}),
ui_location: '3000',
installed: false,
installation_status: 'idle',
is_dependency_service: false,
depends_on: DockerService.OLLAMA_SERVICE_NAME,
depends_on: DockerService.QDRANT_SERVICE_NAME,
},
{
service_name: DockerService.CYBERCHEF_SERVICE_NAME,

View File

@ -62,7 +62,7 @@ const CORE_CAPABILITIES: Capability[] = [
{
id: 'ai',
name: 'AI Assistant',
technicalName: 'Open WebUI + Ollama',
technicalName: 'Ollama',
description: 'Local AI chat that runs entirely on your hardware - no internet required',
features: [
'Private conversations that never leave your device',
@ -70,7 +70,7 @@ const CORE_CAPABILITIES: Capability[] = [
'Ask questions, get help with writing, brainstorm ideas',
'Runs on your own hardware with local AI models',
],
services: ['nomad_open_webui'], // ollama is auto-installed as dependency
services: ['nomad_ollama'],
icon: 'IconRobot',
},
]
@ -822,10 +822,10 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const renderStep3 = () => {
// Check if AI or Information capabilities are selected OR already installed
const isAiSelected = selectedServices.includes('nomad_open_webui') ||
installedServices.some((s) => s.service_name === 'nomad_open_webui')
const isInformationSelected = selectedServices.includes('nomad_kiwix_serve') ||
installedServices.some((s) => s.service_name === 'nomad_kiwix_serve')
const isAiSelected = selectedServices.includes('nomad_ollama') ||
installedServices.some((s) => s.service_name === 'nomad_ollama')
const isInformationSelected = selectedServices.includes('nomad_kiwix_serve') ||
installedServices.some((s) => s.service_name === 'nomad_kiwix_serve')
return (
<div className="space-y-6">

View File

@ -126,7 +126,7 @@ export default function Home(props: {
// Add system items
items.push(...SYSTEM_ITEMS)
if (props.system.services.find((s) => s.service_name === 'nomad_open_webui' && s.installed)) {
if (props.system.services.find((s) => s.service_name === 'nomad_ollama' && s.installed)) {
items.push(KNOWLEDGE_BASE_ITEM)
}

View File

@ -14,7 +14,7 @@ import { ModelResponse } from 'ollama'
export default function ModelsPage(props: {
models: { availableModels: NomadOllamaModel[]; installedModels: ModelResponse[] }
}) {
const { isInstalled } = useServiceInstalledStatus('nomad_open_webui')
const { isInstalled } = useServiceInstalledStatus('nomad_ollama')
const { addNotification } = useNotifications()
const { openModal, closeAllModals } = useModals()
@ -85,12 +85,12 @@ export default function ModelsPage(props: {
<main className="px-12 py-6">
<h1 className="text-4xl font-semibold mb-4">AI Model Manager</h1>
<p className="text-gray-500 mb-4">
Easily manage the AI models available for Open WebUI. We recommend starting with smaller
Easily manage the AI models available for AI Assistant. We recommend starting with smaller
models first to see how they perform on your system before moving on to larger ones.
</p>
{!isInstalled && (
<Alert
title="The Open WebUI service is not installed. Please install it to manage AI models."
title="AI Assistant's dependencies are not installed. Please install them to manage AI models."
type="warning"
variant="solid"
className="!mt-6"
@ -179,7 +179,7 @@ export default function ModelsPage(props: {
confirmDeleteModel(tag.name)
}
}}
icon={isInstalled ? 'TrashIcon' : 'ArrowDownTrayIcon'}
icon={isInstalled ? 'IconTrash' : 'IconDownload'}
>
{isInstalled ? 'Delete' : 'Install'}
</StyledButton>

4055
admin/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +1,3 @@
export type KVStoreKey = 'open_webui_knowledge_id'
export type KVStoreKey = ''
export type KVStoreValue = string | null