import { Head, router, usePage } from '@inertiajs/react'
import { useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import StyledTable from '~/components/StyledTable'
import SettingsLayout from '~/layouts/SettingsLayout'
import { NomadOllamaModel } from '../../../types/ollama'
import StyledButton from '~/components/StyledButton'
import useServiceInstalledStatus from '~/hooks/useServiceInstalledStatus'
import Alert from '~/components/Alert'
import { useNotifications } from '~/context/NotificationContext'
import api from '~/lib/api'
import { useModals } from '~/context/ModalContext'
import StyledModal from '~/components/StyledModal'
import { ModelResponse } from 'ollama'
import { SERVICE_NAMES } from '../../../constants/service_names'
import Switch from '~/components/inputs/Switch'
import StyledSectionHeader from '~/components/StyledSectionHeader'
import { useMutation, useQuery } from '@tanstack/react-query'
import Input from '~/components/inputs/Input'
import { IconSearch, IconRefresh } from '@tabler/icons-react'
import useDebounce from '~/hooks/useDebounce'
import ActiveModelDownloads from '~/components/ActiveModelDownloads'
import { useSystemInfo } from '~/hooks/useSystemInfo'
// Settings page for managing the AI assistant's (Ollama) models: a searchable,
// paginated catalog of available models with per-tag install/delete actions,
// plus assistant settings (chat suggestions toggle, custom name) and a
// force-reinstall flow for the Ollama service itself.
//
// NOTE(review): large parts of this component's JSX appear to have been
// stripped by extraction — element tags are missing, leaving handler bodies,
// stray attribute expressions (`open={true}`, `onCancel={...}`) and bare text
// children. The comments below describe only what the surviving logic
// demonstrably does; restore the markup from version control before editing.
export default function ModelsPage(props: {
models: {
availableModels: NomadOllamaModel[]
installedModels: ModelResponse[]
// Server-provided initial values for the settings controls on this page.
settings: { chatSuggestionsEnabled: boolean; aiAssistantCustomName: string }
}
}) {
// Display name for the assistant, shared via Inertia page props.
const { aiAssistantName } = usePage<{ aiAssistantName: string }>().props
const { isInstalled } = useServiceInstalledStatus(SERVICE_NAMES.OLLAMA)
const { addNotification } = useNotifications()
const { openModal, closeAllModals } = useModals()
const { debounce } = useDebounce()
const { data: systemInfo } = useSystemInfo({})
const { t } = useTranslation()
// Whether the user dismissed the GPU-passthrough warning banner; persisted in
// localStorage. Access is wrapped in try/catch because storage can throw
// (e.g. privacy modes); on failure we default to showing the banner.
const [gpuBannerDismissed, setGpuBannerDismissed] = useState(() => {
try {
return localStorage.getItem('nomad:gpu-banner-dismissed') === 'true'
} catch {
return false
}
})
// True while a force-reinstall of the Ollama service is in flight.
// NOTE(review): presumably consumed by the stripped JSX below — confirm.
const [reinstalling, setReinstalling] = useState(false)
// Hide the GPU banner for this session and remember the dismissal.
const handleDismissGpuBanner = () => {
setGpuBannerDismissed(true)
try {
localStorage.setItem('nomad:gpu-banner-dismissed', 'true')
} catch {}
}
// Opens a confirmation modal; on confirm, force-reinstalls the Ollama
// service, re-enables the GPU banner, and reloads the page after a delay.
// NOTE(review): the modal element wrapping this confirm handler is mangled —
// the opening tag and its onConfirm prop are missing from the source.
const handleForceReinstallOllama = () => {
openModal(
{
closeAllModals()
setReinstalling(true)
try {
const response = await api.forceReinstallService('nomad_ollama')
// Treat a missing or unsuccessful response uniformly as a failure.
if (!response || !response.success) {
throw new Error(response?.message || 'Force reinstall failed')
}
addNotification({
message: t('models.reinstallSuccess', { name: aiAssistantName }),
type: 'success',
})
// Clear the dismissal so GPU health is re-surfaced after reinstall.
try { localStorage.removeItem('nomad:gpu-banner-dismissed') } catch {}
// Give the service time to come back up before the full page reload.
setTimeout(() => window.location.reload(), 5000)
} catch (error) {
addNotification({
message: t('models.reinstallFailed', { error: error instanceof Error ? error.message : 'Unknown error' }),
type: 'error',
})
// Only clear the busy flag on failure; on success the page reloads.
setReinstalling(false)
}
}}
onCancel={closeAllModals}
open={true}
confirmText={t('models.reinstall')}
cancelText={t('models.cancel')}
>
{t('models.reinstallMessage', { name: aiAssistantName })}
,
'gpu-health-force-reinstall-modal'
)
}
// Settings state seeded from the server-rendered props.
// NOTE(review): the setters and updateSettingMutation below have no visible
// call sites — presumably wired up in the stripped JSX. Confirm before
// removing anything as "unused".
const [chatSuggestionsEnabled, setChatSuggestionsEnabled] = useState(
props.models.settings.chatSuggestionsEnabled
)
const [aiAssistantCustomName, setAiAssistantCustomName] = useState(
props.models.settings.aiAssistantCustomName
)
// `queryUI` mirrors the search input immediately for responsive typing;
// `query` (part of the query key) is updated via a 300ms debounce so the
// catalog fetch doesn't fire on every keystroke.
const [query, setQuery] = useState('')
const [queryUI, setQueryUI] = useState('')
// Page size for the available-models list; "load more" grows it by 15.
const [limit, setLimit] = useState(15)
const debouncedSetQuery = debounce((val: string) => {
setQuery(val)
}, 300)
// One-shot flag: the next queryFn run sends `force` to bypass server-side
// caching, then immediately resets it so subsequent refetches are normal.
const forceRefreshRef = useRef(false)
const [isForceRefreshing, setIsForceRefreshing] = useState(false)
// Available-models catalog, keyed by search text and page size. Seeded with
// the server-rendered list so the table has data on first paint.
const { data: availableModelData, isFetching, refetch } = useQuery({
queryKey: ['ollama', 'availableModels', query, limit],
queryFn: async () => {
const force = forceRefreshRef.current
forceRefreshRef.current = false
const res = await api.getAvailableModels({
query,
recommendedOnly: false,
limit,
force: force || undefined,
})
// Normalize a missing response to an empty page so rendering stays simple.
if (!res) {
return {
models: [],
hasMore: false,
}
}
return res
},
initialData: { models: props.models.availableModels, hasMore: false },
})
// Refetch the catalog with the force flag set (cache-busting refresh),
// then confirm completion with a toast.
async function handleForceRefresh() {
forceRefreshRef.current = true
setIsForceRefreshing(true)
await refetch()
setIsForceRefreshing(false)
addNotification({ message: t('models.refreshSuccess'), type: 'success' })
}
// Start downloading a model by name. Only the initiation is handled here;
// progress is presumably surfaced via ActiveModelDownloads — TODO confirm.
async function handleInstallModel(modelName: string) {
try {
const res = await api.downloadModel(modelName)
if (res.success) {
addNotification({
message: t('models.downloadInitiated', { name: modelName }),
type: 'success',
})
}
} catch (error) {
console.error('Error installing model:', error)
addNotification({
message: t('models.downloadError', { name: modelName }),
type: 'error',
})
}
}
// Delete an installed model, then close any open modals and reload the
// Inertia page so the installed-models list refreshes.
// NOTE(review): closeAllModals()/router.reload() run even when res.success
// is false (only a thrown error skips them) — confirm that is intended.
async function handleDeleteModel(modelName: string) {
try {
const res = await api.deleteModel(modelName)
if (res.success) {
addNotification({
message: t('models.deleteSuccess', { name: modelName }),
type: 'success',
})
}
closeAllModals()
router.reload()
} catch (error) {
console.error('Error deleting model:', error)
addNotification({
message: t('models.deleteError', { name: modelName }),
type: 'error',
})
}
}
// Opens a confirmation modal before deleting a model.
// NOTE(review): declared async but contains no await; also the modal element
// wrapping the confirm handler is mangled (opening tag/prop missing).
async function confirmDeleteModel(model: string) {
openModal(
{
handleDeleteModel(model)
}}
onCancel={closeAllModals}
open={true}
confirmText={t('models.delete')}
cancelText={t('models.cancel')}
confirmVariant="primary"
>
{t('models.deleteModelMessage')}
,
'confirm-delete-model-modal'
)
}
// Persists a single key/value setting change and surfaces the outcome as a
// toast. Accepts boolean (toggles) or string (custom name) values.
const updateSettingMutation = useMutation({
mutationFn: async ({ key, value }: { key: string; value: boolean | string }) => {
return await api.updateSetting(key, value)
},
onSuccess: () => {
addNotification({
message: t('models.settingUpdated'),
type: 'success',
})
},
onError: (error) => {
console.error('Error updating setting:', error)
addNotification({
message: t('models.settingUpdateError'),
type: 'error',
})
},
})
// NOTE(review): the JSX below is heavily mangled — element tags (layout,
// alerts, search input, StyledTable, expandable row table, load-more button)
// are missing, leaving text children, attribute fragments and `|` separators.
// The surviving expressions show: a not-installed alert gate, a GPU
// passthrough-failure banner gated on dismissal, a debounced search box, the
// models table with an expandable per-tag row offering install/delete, and a
// hasMore-gated load-more control. Restore markup before editing.
return (
{aiAssistantName}
{t('models.description', { name: aiAssistantName })}
{!isInstalled && (
)}
{isInstalled && systemInfo?.gpuHealth?.status === 'passthrough_failed' && !gpuBannerDismissed && (
)}
{
setQueryUI(e.target.value)
debouncedSetQuery(e.target.value)
}}
className="w-1/3"
leftIcon={}
/>
{t('models.refreshModels')}
className="font-semibold mt-4"
rowLines={true}
columns={[
{
accessor: 'name',
title: t('models.columns.name'),
render(record) {
return (
{record.name}
{record.description}
)
},
},
{
accessor: 'estimated_pulls',
title: t('models.columns.estimatedPulls'),
},
{
accessor: 'model_last_updated',
title: t('models.columns.lastUpdated'),
},
]}
data={availableModelData?.models || []}
loading={isFetching}
expandable={{
expandedRowRender: (record) => (
|
{t('models.columns.tag')}
|
{t('models.columns.inputType')}
|
{t('models.columns.contextSize')}
|
{t('models.columns.modelSize')}
|
{t('models.columns.action')}
|
{record.tags.map((tag, tagIndex) => {
const isInstalled = props.models.installedModels.some(
(mod) => mod.name === tag.name
)
return (
|
{tag.name}
|
{tag.input || 'N/A'}
|
{tag.context || 'N/A'}
|
{tag.size || 'N/A'}
|
{
if (!isInstalled) {
handleInstallModel(tag.name)
} else {
confirmDeleteModel(tag.name)
}
}}
icon={isInstalled ? 'IconTrash' : 'IconDownload'}
>
{isInstalled ? t('models.delete') : t('models.install')}
|
)
})}
),
}}
/>
{availableModelData?.hasMore && (
{
setLimit((prev) => prev + 15)
}}
>
{t('models.loadMore')}
)}
)
}