feat: Add ability to use a remote ollama instance on LAN

This adds a new setting in the chat app under "models & settings" where
the user can set "Remote Ollama URL" to the IP or hostname of another
device on the network that is running Ollama with the environment
variable "OLLAMA_HOST=0.0.0.0:11434" set.
This commit is contained in:
Henry Estela 2026-03-12 13:54:36 -07:00
parent 5d97d471d0
commit 75ea37d249
No known key found for this signature in database
GPG Key ID: 90439853E9E235BA
9 changed files with 254 additions and 8 deletions

View File

@ -1,11 +1,15 @@
import { ChatService } from '#services/chat_service'
import { DockerService } from '#services/docker_service'
import { OllamaService } from '#services/ollama_service'
import { RagService } from '#services/rag_service'
import Service from '#models/service'
import KVStore from '#models/kv_store'
import { modelNameSchema } from '#validators/download'
import { chatSchema, getAvailableModelsSchema } from '#validators/ollama'
import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import { DEFAULT_QUERY_REWRITE_MODEL, RAG_CONTEXT_LIMITS, SYSTEM_PROMPTS } from '../../constants/ollama.js'
import { SERVICE_NAMES } from '../../constants/service_names.js'
import logger from '@adonisjs/core/services/logger'
import type { Message } from 'ollama'
@ -13,6 +17,7 @@ import type { Message } from 'ollama'
export default class OllamaController {
constructor(
private chatService: ChatService,
private dockerService: DockerService,
private ollamaService: OllamaService,
private ragService: RagService
) { }
@ -171,6 +176,72 @@ export default class OllamaController {
}
}
/**
 * Configure a remote Ollama instance, or clear the existing remote config.
 *
 * Body param `remoteUrl`: http(s) URL of a LAN host running Ollama with
 * OLLAMA_HOST=0.0.0.0 — pass null or an empty string to clear the config.
 * Responds 404 if the Ollama service record is missing, 400 on an invalid
 * URL or a failed connectivity probe, otherwise `{ success: true, ... }`.
 */
async configureRemote({ request, response }: HttpContext) {
  const rawUrl: string | null = request.input('remoteUrl', null)
  const ollamaService = await Service.query().where('service_name', SERVICE_NAMES.OLLAMA).first()
  if (!ollamaService) {
    return response.status(404).send({ success: false, message: 'Ollama service record not found.' })
  }
  // Normalize once so validation, the connectivity probe, and the saved value all agree.
  const remoteUrl = rawUrl?.trim() ?? ''
  // Clear path: null or empty URL removes remote config and marks service as not installed
  if (remoteUrl === '') {
    await KVStore.clearValue('ai.remoteOllamaUrl')
    ollamaService.installed = false
    ollamaService.installation_status = 'idle'
    await ollamaService.save()
    return { success: true, message: 'Remote Ollama configuration cleared.' }
  }
  // Validate URL format. A plain startsWith('http') check would also accept
  // schemes such as 'httpfoo://', so parse and verify the protocol explicitly.
  let protocol = ''
  try {
    protocol = new URL(remoteUrl).protocol
  } catch {
    // leave protocol empty — falls through to the 400 below
  }
  if (protocol !== 'http:' && protocol !== 'https:') {
    return response.status(400).send({
      success: false,
      message: 'Invalid URL. Must start with http:// or https://',
    })
  }
  // Test connectivity (5s timeout) against Ollama's standard /api/tags endpoint
  try {
    const testResponse = await fetch(`${remoteUrl.replace(/\/$/, '')}/api/tags`, {
      signal: AbortSignal.timeout(5000),
    })
    if (!testResponse.ok) {
      return response.status(400).send({
        success: false,
        message: `Could not connect to Ollama at ${remoteUrl} (HTTP ${testResponse.status}). Make sure Ollama is running with OLLAMA_HOST=0.0.0.0.`,
      })
    }
  } catch (error) {
    // Log the underlying cause (timeout, DNS failure, connection refused) for diagnostics.
    logger.error('[OllamaController] Remote Ollama connectivity test failed:', error)
    return response.status(400).send({
      success: false,
      message: `Could not connect to Ollama at ${remoteUrl}. Make sure the host is reachable and Ollama is running with OLLAMA_HOST=0.0.0.0.`,
    })
  }
  // Save remote URL and mark service as installed
  await KVStore.setValue('ai.remoteOllamaUrl', remoteUrl)
  ollamaService.installed = true
  ollamaService.installation_status = 'idle'
  await ollamaService.save()
  // Install Qdrant if not already installed (fire-and-forget)
  const qdrantService = await Service.query().where('service_name', SERVICE_NAMES.QDRANT).first()
  if (qdrantService && !qdrantService.installed) {
    this.dockerService.createContainerPreflight(SERVICE_NAMES.QDRANT).catch((error) => {
      logger.error('[OllamaController] Failed to start Qdrant preflight:', error)
    })
  }
  // Mirror post-install side effects: disable suggestions, trigger docs discovery
  await KVStore.setValue('chat.suggestionsEnabled', false)
  this.ragService.discoverNomadDocs().catch((error) => {
    logger.error('[OllamaController] Failed to discover Nomad docs:', error)
  })
  return { success: true, message: 'Remote Ollama configured.' }
}
async deleteModel({ request }: HttpContext) {
const reqData = await request.validateUsing(modelNameSchema)
await this.ollamaService.deleteModel(reqData.model)

View File

@ -55,6 +55,7 @@ export default class SettingsController {
const installedModels = await this.ollamaService.getModels();
const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled')
const aiAssistantCustomName = await KVStore.getValue('ai.assistantCustomName')
const remoteOllamaUrl = await KVStore.getValue('ai.remoteOllamaUrl')
return inertia.render('settings/models', {
models: {
availableModels: availableModels?.models || [],
@ -62,6 +63,7 @@ export default class SettingsController {
settings: {
chatSuggestionsEnabled: chatSuggestionsEnabled ?? false,
aiAssistantCustomName: aiAssistantCustomName ?? '',
remoteOllamaUrl: remoteOllamaUrl ?? '',
}
}
});

View File

@ -140,6 +140,11 @@ export class DockerService {
return null
}
if (serviceName === SERVICE_NAMES.OLLAMA) {
const remoteUrl = await KVStore.getValue('ai.remoteOllamaUrl')
if (remoteUrl) return remoteUrl
}
const service = await Service.query()
.where('service_name', serviceName)
.andWhere('installed', true)

View File

@ -1,3 +1,3 @@
import { KVStoreKey } from "../types/kv_store.js";
export const SETTINGS_KEYS: KVStoreKey[] = ['chat.suggestionsEnabled', 'chat.lastModel', 'ui.hasVisitedEasySetup', 'system.earlyAccess', 'ai.assistantCustomName'];
export const SETTINGS_KEYS: KVStoreKey[] = ['chat.suggestionsEnabled', 'chat.lastModel', 'ui.hasVisitedEasySetup', 'system.earlyAccess', 'ai.assistantCustomName', 'ai.remoteOllamaUrl'];

View File

@ -43,6 +43,16 @@ class API {
})()
}
/**
 * Configure (or clear, by passing null) the remote Ollama URL on the backend.
 * The server validates the URL and tests connectivity before persisting it.
 */
async configureRemoteOllama(remoteUrl: string | null): Promise<{ success: boolean; message: string }> {
  type ConfigureResult = { success: boolean; message: string }
  const run = catchInternal(async () => {
    const payload = { remoteUrl }
    const res = await this.client.post<ConfigureResult>('/ollama/configure-remote', payload)
    return res.data
  })
  return run()
}
async deleteModel(model: string): Promise<{ success: boolean; message: string }> {
return catchInternal(async () => {
const response = await this.client.delete('/ollama/models', { data: { model } })

View File

@ -121,6 +121,9 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const [selectedAiModels, setSelectedAiModels] = useState<string[]>([])
const [isProcessing, setIsProcessing] = useState(false)
const [showAdditionalTools, setShowAdditionalTools] = useState(false)
const [remoteOllamaEnabled, setRemoteOllamaEnabled] = useState(false)
const [remoteOllamaUrl, setRemoteOllamaUrl] = useState('')
const [remoteOllamaUrlError, setRemoteOllamaUrlError] = useState<string | null>(null)
// Category/tier selection state
const [selectedTiers, setSelectedTiers] = useState<Map<string, SpecTier>>(new Map())
@ -357,8 +360,24 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
setIsProcessing(true)
try {
// If using remote Ollama, configure it first before other installs
if (remoteOllamaEnabled && remoteOllamaUrl) {
const remoteResult = await api.configureRemoteOllama(remoteOllamaUrl)
if (!remoteResult?.success) {
const msg = (remoteResult as any)?.message || 'Failed to configure remote Ollama.'
setRemoteOllamaUrlError(msg)
setIsProcessing(false)
setCurrentStep(1)
return
}
}
// All of these ops don't actually wait for completion, they just kick off the process, so we can run them in parallel without awaiting each one sequentially
const installPromises = selectedServices.map((serviceName) => api.installService(serviceName))
// Exclude Ollama from local install when using remote mode
const servicesToInstall = remoteOllamaEnabled
? selectedServices.filter((s) => s !== SERVICE_NAMES.OLLAMA)
: selectedServices
const installPromises = servicesToInstall.map((serviceName) => api.installService(serviceName))
await Promise.all(installPromises)
@ -687,9 +706,53 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
<div>
<h3 className="text-lg font-semibold text-gray-700 mb-4">Core Capabilities</h3>
<div className="grid grid-cols-1 lg:grid-cols-3 gap-4">
{existingCoreCapabilities.map((capability) =>
renderCapabilityCard(capability, true)
)}
{existingCoreCapabilities.map((capability) => {
if (capability.id === 'ai') {
const isAiSelected = isCapabilitySelected(capability)
return (
<div key={capability.id}>
{renderCapabilityCard(capability, true)}
{isAiSelected && !isCapabilityInstalled(capability) && (
<div
className="mt-2 p-4 bg-gray-50 rounded-lg border border-gray-200"
onClick={(e) => e.stopPropagation()}
>
<label className="flex items-center gap-2 cursor-pointer select-none">
<input
type="checkbox"
checked={remoteOllamaEnabled}
onChange={(e) => {
setRemoteOllamaEnabled(e.target.checked)
setRemoteOllamaUrlError(null)
}}
className="w-4 h-4 accent-desert-green"
/>
<span className="text-sm font-medium text-gray-700">Use remote Ollama instance</span>
</label>
{remoteOllamaEnabled && (
<div className="mt-3">
<input
type="text"
value={remoteOllamaUrl}
onChange={(e) => {
setRemoteOllamaUrl(e.target.value)
setRemoteOllamaUrlError(null)
}}
placeholder="http://192.168.1.100:11434"
className="w-full px-3 py-2 text-sm border border-gray-300 rounded-md focus:outline-none focus:ring-1 focus:ring-desert-green"
/>
{remoteOllamaUrlError && (
<p className="mt-1 text-xs text-red-600">{remoteOllamaUrlError}</p>
)}
</div>
)}
</div>
)}
</div>
)
}
return renderCapabilityCard(capability, true)
})}
</div>
</div>
)}
@ -803,8 +866,14 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
<p className="text-sm text-gray-500">Select models to download for offline AI</p>
</div>
</div>
{isLoadingRecommendedModels ? (
{remoteOllamaEnabled && remoteOllamaUrl ? (
<Alert
title="Remote Ollama selected"
message="Models are managed on the remote machine. You can add models from Settings > AI Assistant after setup."
type="info"
variant="bordered"
/>
) : isLoadingRecommendedModels ? (
<div className="flex justify-center py-12">
<LoadingSpinner />
</div>

View File

@ -25,7 +25,7 @@ export default function ModelsPage(props: {
models: {
availableModels: NomadOllamaModel[]
installedModels: ModelResponse[]
settings: { chatSuggestionsEnabled: boolean; aiAssistantCustomName: string }
settings: { chatSuggestionsEnabled: boolean; aiAssistantCustomName: string; remoteOllamaUrl: string }
}
}) {
const { aiAssistantName } = usePage<{ aiAssistantName: string }>().props
@ -97,6 +97,43 @@ export default function ModelsPage(props: {
const [aiAssistantCustomName, setAiAssistantCustomName] = useState(
props.models.settings.aiAssistantCustomName
)
const [remoteOllamaUrl, setRemoteOllamaUrl] = useState(props.models.settings.remoteOllamaUrl)
const [remoteOllamaError, setRemoteOllamaError] = useState<string | null>(null)
const [remoteOllamaSaving, setRemoteOllamaSaving] = useState(false)
/**
 * Save the entered remote Ollama URL via the API (the server also tests
 * connectivity) and reload the page on success. Failures — whether a thrown
 * error or a resolved `{ success: false }` response — are surfaced inline.
 */
async function handleSaveRemoteOllama() {
  setRemoteOllamaError(null)
  setRemoteOllamaSaving(true)
  try {
    const res = await api.configureRemoteOllama(remoteOllamaUrl || null)
    if (res?.success) {
      addNotification({ message: res.message, type: 'success' })
      router.reload()
    } else {
      // Previously a failed-but-not-thrown response was silently dropped,
      // leaving the user with no feedback. Show the server's message.
      setRemoteOllamaError(res?.message || 'Failed to configure remote Ollama.')
    }
  } catch (error: any) {
    const msg = error?.response?.data?.message || error?.message || 'Failed to configure remote Ollama.'
    setRemoteOllamaError(msg)
  } finally {
    setRemoteOllamaSaving(false)
  }
}
/**
 * Clear the remote Ollama configuration via the API, reset the input field,
 * and reload on success. Error handling matches the save handler so a
 * server-provided message is shown when available.
 */
async function handleClearRemoteOllama() {
  setRemoteOllamaError(null)
  setRemoteOllamaSaving(true)
  try {
    const res = await api.configureRemoteOllama(null)
    if (res?.success) {
      setRemoteOllamaUrl('')
      addNotification({ message: 'Remote Ollama configuration cleared.', type: 'success' })
      router.reload()
    } else {
      // Surface a failed-but-not-thrown response instead of silently doing nothing.
      setRemoteOllamaError(res?.message || 'Failed to clear remote Ollama.')
    }
  } catch (error: any) {
    const msg = error?.response?.data?.message || error?.message || 'Failed to clear remote Ollama.'
    setRemoteOllamaError(msg)
  } finally {
    setRemoteOllamaSaving(false)
  }
}
const [query, setQuery] = useState('')
const [queryUI, setQueryUI] = useState('')
@ -286,6 +323,56 @@ export default function ModelsPage(props: {
/>
</div>
</div>
<StyledSectionHeader title="Remote Connection" className="mt-8 mb-4" />
<div className="bg-white rounded-lg border-2 border-gray-200 p-6">
<p className="text-sm text-gray-500 mb-4">
Connect to an Ollama instance running on another machine in your local network.
The remote host must be started with <code className="bg-gray-100 px-1 rounded">OLLAMA_HOST=0.0.0.0</code>.
</p>
{props.models.settings.remoteOllamaUrl && (
<p className="text-sm text-green-700 bg-green-50 border border-green-200 rounded px-3 py-2 mb-4">
Currently configured: <span className="font-mono">{props.models.settings.remoteOllamaUrl}</span>
</p>
)}
<div className="flex items-end gap-3">
<div className="flex-1">
<Input
name="remoteOllamaUrl"
label="Remote Ollama URL"
placeholder="http://192.168.1.100:11434"
value={remoteOllamaUrl}
onChange={(e) => {
setRemoteOllamaUrl(e.target.value)
setRemoteOllamaError(null)
}}
/>
{remoteOllamaError && (
<p className="text-sm text-red-600 mt-1">{remoteOllamaError}</p>
)}
</div>
<StyledButton
variant="primary"
onClick={handleSaveRemoteOllama}
loading={remoteOllamaSaving}
disabled={remoteOllamaSaving || !remoteOllamaUrl}
className="mb-0.5"
>
Save &amp; Test
</StyledButton>
{props.models.settings.remoteOllamaUrl && (
<StyledButton
variant="danger"
onClick={handleClearRemoteOllama}
loading={remoteOllamaSaving}
disabled={remoteOllamaSaving}
className="mb-0.5"
>
Clear
</StyledButton>
)}
</div>
</div>
<ActiveModelDownloads withHeader />
<StyledSectionHeader title="Models" className="mt-12 mb-4" />

View File

@ -105,6 +105,7 @@ router
router.post('/models', [OllamaController, 'dispatchModelDownload'])
router.delete('/models', [OllamaController, 'deleteModel'])
router.get('/installed-models', [OllamaController, 'installedModels'])
router.post('/configure-remote', [OllamaController, 'configureRemote'])
})
.prefix('/api/ollama')

View File

@ -8,6 +8,7 @@ export const KV_STORE_SCHEMA = {
'system.earlyAccess': 'boolean',
'ui.hasVisitedEasySetup': 'boolean',
'ai.assistantCustomName': 'string',
'ai.remoteOllamaUrl': 'string',
} as const
type KVTagToType<T extends string> = T extends 'boolean' ? boolean : string