fix(AI): allow force refresh of models list

This commit is contained in:
Jake Turner 2026-03-05 22:31:24 +00:00 committed by Jake Turner
parent bc016e6c60
commit db69428193
6 changed files with 45 additions and 13 deletions

View File

@ -22,6 +22,7 @@ export default class OllamaController {
recommendedOnly: reqData.recommendedOnly,
query: reqData.query || null,
limit: reqData.limit || 15,
force: reqData.force,
})
}

View File

@ -183,7 +183,7 @@ export class OllamaService {
}
async getAvailableModels(
{ sort, recommendedOnly, query, limit }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null, limit?: number } = {
{ sort, recommendedOnly, query, limit, force }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null, limit?: number, force?: boolean } = {
sort: 'pulls',
recommendedOnly: false,
query: null,
@ -191,7 +191,7 @@ export class OllamaService {
}
): Promise<{ models: NomadOllamaModel[], hasMore: boolean } | null> {
try {
const models = await this.retrieveAndRefreshModels(sort)
const models = await this.retrieveAndRefreshModels(sort, force)
if (!models) {
// If we fail to get models from the API, return the fallback recommended models
logger.warn(
@ -244,14 +244,19 @@ export class OllamaService {
}
private async retrieveAndRefreshModels(
sort?: 'pulls' | 'name'
sort?: 'pulls' | 'name',
force?: boolean
): Promise<NomadOllamaModel[] | null> {
try {
if (!force) {
const cachedModels = await this.readModelsFromCache()
if (cachedModels) {
logger.info('[OllamaService] Using cached available models data')
return this.sortModels(cachedModels, sort)
}
} else {
logger.info('[OllamaService] Force refresh requested, bypassing cache')
}
logger.info('[OllamaService] Fetching fresh available models from API')

View File

@ -19,5 +19,6 @@ export const getAvailableModelsSchema = vine.compile(
recommendedOnly: vine.boolean().optional(),
query: vine.string().trim().optional(),
limit: vine.number().positive().optional(),
force: vine.boolean().optional(),
})
)

View File

@ -20,6 +20,7 @@ const StyledButton: React.FC<StyledButtonProps> = ({
size = 'md',
loading = false,
fullWidth = false,
className,
...props
}) => {
const isDisabled = useMemo(() => {
@ -152,7 +153,8 @@ const StyledButton: React.FC<StyledButtonProps> = ({
getSizeClasses(),
getVariantClasses(),
isDisabled ? 'pointer-events-none opacity-60' : 'cursor-pointer',
'items-center justify-center rounded-md font-semibold focus:outline-none focus:ring-2 focus:ring-desert-green-light focus:ring-offset-2 focus:ring-offset-desert-sand disabled:cursor-not-allowed disabled:shadow-none'
'items-center justify-center rounded-md font-semibold focus:outline-none focus:ring-2 focus:ring-desert-green-light focus:ring-offset-2 focus:ring-offset-desert-sand disabled:cursor-not-allowed disabled:shadow-none',
className
)}
{...props}
disabled={isDisabled}

View File

@ -197,7 +197,7 @@ class API {
})()
}
async getAvailableModels(params: { query?: string; recommendedOnly?: boolean; limit?: number }) {
async getAvailableModels(params: { query?: string; recommendedOnly?: boolean; limit?: number; force?: boolean }) {
return catchInternal(async () => {
const response = await this.client.get<{
models: NomadOllamaModel[]

View File

@ -1,5 +1,5 @@
import { Head, router, usePage } from '@inertiajs/react'
import { useState } from 'react'
import { useRef, useState } from 'react'
import StyledTable from '~/components/StyledTable'
import SettingsLayout from '~/layouts/SettingsLayout'
import { NomadOllamaModel } from '../../../types/ollama'
@ -16,7 +16,7 @@ import Switch from '~/components/inputs/Switch'
import StyledSectionHeader from '~/components/StyledSectionHeader'
import { useMutation, useQuery } from '@tanstack/react-query'
import Input from '~/components/inputs/Input'
import { IconSearch } from '@tabler/icons-react'
import { IconSearch, IconRefresh } from '@tabler/icons-react'
import useDebounce from '~/hooks/useDebounce'
import ActiveModelDownloads from '~/components/ActiveModelDownloads'
@ -47,13 +47,19 @@ export default function ModelsPage(props: {
setQuery(val)
}, 300)
const { data: availableModelData, isFetching } = useQuery({
const forceRefreshRef = useRef(false)
const [isForceRefreshing, setIsForceRefreshing] = useState(false)
const { data: availableModelData, isFetching, refetch } = useQuery({
queryKey: ['ollama', 'availableModels', query, limit],
queryFn: async () => {
const force = forceRefreshRef.current
forceRefreshRef.current = false
const res = await api.getAvailableModels({
query,
recommendedOnly: false,
limit,
force: force || undefined,
})
if (!res) {
return {
@ -66,6 +72,14 @@ export default function ModelsPage(props: {
initialData: { models: props.models.availableModels, hasMore: false },
})
// Force a cache-bypassing refetch of the available-models list.
// Arms forceRefreshRef so the next queryFn invocation sends `force: true`
// to the API, and drives the refresh button's loading indicator.
async function handleForceRefresh() {
  forceRefreshRef.current = true
  setIsForceRefreshing(true)
  try {
    // refetch() re-runs queryFn, which reads and immediately clears forceRefreshRef.
    await refetch()
  } finally {
    // Always clear the loading flag — the original code left the button
    // spinning forever if refetch() rejected.
    setIsForceRefreshing(false)
  }
  // NOTE(review): TanStack Query's refetch() resolves (with an error status on
  // the result) rather than rejecting on a failed request — consider checking
  // the returned result's status before showing a success notification.
  addNotification({ message: 'Model list refreshed from remote.', type: 'success' })
}
async function handleInstallModel(modelName: string) {
try {
const res = await api.downloadModel(modelName)
@ -196,7 +210,7 @@ export default function ModelsPage(props: {
<ActiveModelDownloads withHeader />
<StyledSectionHeader title="Models" className="mt-12 mb-4" />
<div className="flex justify-start mt-4">
<div className="flex justify-start items-center gap-3 mt-4">
<Input
name="search"
label=""
@ -209,6 +223,15 @@ export default function ModelsPage(props: {
className="w-1/3"
leftIcon={<IconSearch className="w-5 h-5 text-gray-400" />}
/>
<StyledButton
variant="secondary"
onClick={handleForceRefresh}
icon="IconRefresh"
loading={isForceRefreshing}
className='mt-1'
>
Refresh Models
</StyledButton>
</div>
<StyledTable<NomadOllamaModel>
className="font-semibold mt-4"