fix(AI): allow force refresh of models list

This commit is contained in:
Jake Turner 2026-03-05 22:31:24 +00:00 committed by Jake Turner
parent bc016e6c60
commit db69428193
6 changed files with 45 additions and 13 deletions

View File

@ -22,6 +22,7 @@ export default class OllamaController {
recommendedOnly: reqData.recommendedOnly, recommendedOnly: reqData.recommendedOnly,
query: reqData.query || null, query: reqData.query || null,
limit: reqData.limit || 15, limit: reqData.limit || 15,
force: reqData.force,
}) })
} }

View File

@ -183,7 +183,7 @@ export class OllamaService {
} }
async getAvailableModels( async getAvailableModels(
{ sort, recommendedOnly, query, limit }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null, limit?: number } = { { sort, recommendedOnly, query, limit, force }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null, limit?: number, force?: boolean } = {
sort: 'pulls', sort: 'pulls',
recommendedOnly: false, recommendedOnly: false,
query: null, query: null,
@ -191,7 +191,7 @@ export class OllamaService {
} }
): Promise<{ models: NomadOllamaModel[], hasMore: boolean } | null> { ): Promise<{ models: NomadOllamaModel[], hasMore: boolean } | null> {
try { try {
const models = await this.retrieveAndRefreshModels(sort) const models = await this.retrieveAndRefreshModels(sort, force)
if (!models) { if (!models) {
// If we fail to get models from the API, return the fallback recommended models // If we fail to get models from the API, return the fallback recommended models
logger.warn( logger.warn(
@ -244,13 +244,18 @@ export class OllamaService {
} }
private async retrieveAndRefreshModels( private async retrieveAndRefreshModels(
sort?: 'pulls' | 'name' sort?: 'pulls' | 'name',
force?: boolean
): Promise<NomadOllamaModel[] | null> { ): Promise<NomadOllamaModel[] | null> {
try { try {
const cachedModels = await this.readModelsFromCache() if (!force) {
if (cachedModels) { const cachedModels = await this.readModelsFromCache()
logger.info('[OllamaService] Using cached available models data') if (cachedModels) {
return this.sortModels(cachedModels, sort) logger.info('[OllamaService] Using cached available models data')
return this.sortModels(cachedModels, sort)
}
} else {
logger.info('[OllamaService] Force refresh requested, bypassing cache')
} }
logger.info('[OllamaService] Fetching fresh available models from API') logger.info('[OllamaService] Fetching fresh available models from API')

View File

@ -19,5 +19,6 @@ export const getAvailableModelsSchema = vine.compile(
recommendedOnly: vine.boolean().optional(), recommendedOnly: vine.boolean().optional(),
query: vine.string().trim().optional(), query: vine.string().trim().optional(),
limit: vine.number().positive().optional(), limit: vine.number().positive().optional(),
force: vine.boolean().optional(),
}) })
) )

View File

@ -20,6 +20,7 @@ const StyledButton: React.FC<StyledButtonProps> = ({
size = 'md', size = 'md',
loading = false, loading = false,
fullWidth = false, fullWidth = false,
className,
...props ...props
}) => { }) => {
const isDisabled = useMemo(() => { const isDisabled = useMemo(() => {
@ -152,7 +153,8 @@ const StyledButton: React.FC<StyledButtonProps> = ({
getSizeClasses(), getSizeClasses(),
getVariantClasses(), getVariantClasses(),
isDisabled ? 'pointer-events-none opacity-60' : 'cursor-pointer', isDisabled ? 'pointer-events-none opacity-60' : 'cursor-pointer',
'items-center justify-center rounded-md font-semibold focus:outline-none focus:ring-2 focus:ring-desert-green-light focus:ring-offset-2 focus:ring-offset-desert-sand disabled:cursor-not-allowed disabled:shadow-none' 'items-center justify-center rounded-md font-semibold focus:outline-none focus:ring-2 focus:ring-desert-green-light focus:ring-offset-2 focus:ring-offset-desert-sand disabled:cursor-not-allowed disabled:shadow-none',
className
)} )}
{...props} {...props}
disabled={isDisabled} disabled={isDisabled}

View File

@ -197,7 +197,7 @@ class API {
})() })()
} }
async getAvailableModels(params: { query?: string; recommendedOnly?: boolean; limit?: number }) { async getAvailableModels(params: { query?: string; recommendedOnly?: boolean; limit?: number; force?: boolean }) {
return catchInternal(async () => { return catchInternal(async () => {
const response = await this.client.get<{ const response = await this.client.get<{
models: NomadOllamaModel[] models: NomadOllamaModel[]

View File

@ -1,5 +1,5 @@
import { Head, router, usePage } from '@inertiajs/react' import { Head, router, usePage } from '@inertiajs/react'
import { useState } from 'react' import { useRef, useState } from 'react'
import StyledTable from '~/components/StyledTable' import StyledTable from '~/components/StyledTable'
import SettingsLayout from '~/layouts/SettingsLayout' import SettingsLayout from '~/layouts/SettingsLayout'
import { NomadOllamaModel } from '../../../types/ollama' import { NomadOllamaModel } from '../../../types/ollama'
@ -16,7 +16,7 @@ import Switch from '~/components/inputs/Switch'
import StyledSectionHeader from '~/components/StyledSectionHeader' import StyledSectionHeader from '~/components/StyledSectionHeader'
import { useMutation, useQuery } from '@tanstack/react-query' import { useMutation, useQuery } from '@tanstack/react-query'
import Input from '~/components/inputs/Input' import Input from '~/components/inputs/Input'
import { IconSearch } from '@tabler/icons-react' import { IconSearch, IconRefresh } from '@tabler/icons-react'
import useDebounce from '~/hooks/useDebounce' import useDebounce from '~/hooks/useDebounce'
import ActiveModelDownloads from '~/components/ActiveModelDownloads' import ActiveModelDownloads from '~/components/ActiveModelDownloads'
@ -47,13 +47,19 @@ export default function ModelsPage(props: {
setQuery(val) setQuery(val)
}, 300) }, 300)
const { data: availableModelData, isFetching } = useQuery({ const forceRefreshRef = useRef(false)
const [isForceRefreshing, setIsForceRefreshing] = useState(false)
const { data: availableModelData, isFetching, refetch } = useQuery({
queryKey: ['ollama', 'availableModels', query, limit], queryKey: ['ollama', 'availableModels', query, limit],
queryFn: async () => { queryFn: async () => {
const force = forceRefreshRef.current
forceRefreshRef.current = false
const res = await api.getAvailableModels({ const res = await api.getAvailableModels({
query, query,
recommendedOnly: false, recommendedOnly: false,
limit, limit,
force: force || undefined,
}) })
if (!res) { if (!res) {
return { return {
@ -66,6 +72,14 @@ export default function ModelsPage(props: {
initialData: { models: props.models.availableModels, hasMore: false }, initialData: { models: props.models.availableModels, hasMore: false },
}) })
/**
 * Force-refresh the available-models list, bypassing the server-side cache.
 *
 * Sets `forceRefreshRef` so the next `queryFn` run sends `force: true`
 * (the queryFn resets the ref after reading it), then triggers a refetch.
 *
 * NOTE(review): TanStack Query's `refetch` resolves with a result object and
 * does not reject on query errors by default, so we must inspect
 * `result.error` instead of relying on a thrown exception — the previous
 * version reported success even when the refetch failed. The `finally`
 * guarantees the button's loading state is cleared on any outcome.
 */
async function handleForceRefresh() {
  forceRefreshRef.current = true
  setIsForceRefreshing(true)
  try {
    const result = await refetch()
    if (result.error) {
      addNotification({ message: 'Failed to refresh model list.', type: 'error' })
    } else {
      addNotification({ message: 'Model list refreshed from remote.', type: 'success' })
    }
  } finally {
    setIsForceRefreshing(false)
  }
}
async function handleInstallModel(modelName: string) { async function handleInstallModel(modelName: string) {
try { try {
const res = await api.downloadModel(modelName) const res = await api.downloadModel(modelName)
@ -196,7 +210,7 @@ export default function ModelsPage(props: {
<ActiveModelDownloads withHeader /> <ActiveModelDownloads withHeader />
<StyledSectionHeader title="Models" className="mt-12 mb-4" /> <StyledSectionHeader title="Models" className="mt-12 mb-4" />
<div className="flex justify-start mt-4"> <div className="flex justify-start items-center gap-3 mt-4">
<Input <Input
name="search" name="search"
label="" label=""
@ -209,6 +223,15 @@ export default function ModelsPage(props: {
className="w-1/3" className="w-1/3"
leftIcon={<IconSearch className="w-5 h-5 text-gray-400" />} leftIcon={<IconSearch className="w-5 h-5 text-gray-400" />}
/> />
<StyledButton
variant="secondary"
onClick={handleForceRefresh}
icon="IconRefresh"
loading={isForceRefreshing}
className='mt-1'
>
Refresh Models
</StyledButton>
</div> </div>
<StyledTable<NomadOllamaModel> <StyledTable<NomadOllamaModel>
className="font-semibold mt-4" className="font-semibold mt-4"