feat(Models): paginate available models endpoint

This commit is contained in:
Jake Turner 2026-02-25 06:13:40 +00:00 committed by Jake Turner
parent a3f10dd158
commit 6874a2824f
7 changed files with 68 additions and 23 deletions

View File

@@ -21,6 +21,7 @@ export default class OllamaController {
sort: reqData.sort, sort: reqData.sort,
recommendedOnly: reqData.recommendedOnly, recommendedOnly: reqData.recommendedOnly,
query: reqData.query || null, query: reqData.query || null,
limit: reqData.limit || 15,
}) })
} }

View File

@@ -51,12 +51,12 @@ export default class SettingsController {
} }
async models({ inertia }: HttpContext) { async models({ inertia }: HttpContext) {
const availableModels = await this.ollamaService.getAvailableModels({ sort: 'pulls', recommendedOnly: false, query: null }); const availableModels = await this.ollamaService.getAvailableModels({ sort: 'pulls', recommendedOnly: false, query: null, limit: 15 });
const installedModels = await this.ollamaService.getModels(); const installedModels = await this.ollamaService.getModels();
const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled') const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled')
return inertia.render('settings/models', { return inertia.render('settings/models', {
models: { models: {
availableModels: availableModels || [], availableModels: availableModels?.models || [],
installedModels: installedModels || [], installedModels: installedModels || [],
settings: { settings: {
chatSuggestionsEnabled: parseBoolean(chatSuggestionsEnabled) chatSuggestionsEnabled: parseBoolean(chatSuggestionsEnabled)

View File

@@ -183,12 +183,13 @@ export class OllamaService {
} }
async getAvailableModels( async getAvailableModels(
{ sort, recommendedOnly, query }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null } = { { sort, recommendedOnly, query, limit }: { sort?: 'pulls' | 'name'; recommendedOnly?: boolean, query: string | null, limit?: number } = {
sort: 'pulls', sort: 'pulls',
recommendedOnly: false, recommendedOnly: false,
query: null, query: null,
limit: 15,
} }
): Promise<NomadOllamaModel[] | null> { ): Promise<{ models: NomadOllamaModel[], hasMore: boolean } | null> {
try { try {
const models = await this.retrieveAndRefreshModels(sort) const models = await this.retrieveAndRefreshModels(sort)
if (!models) { if (!models) {
@@ -196,12 +197,18 @@ export class OllamaService {
logger.warn( logger.warn(
'[OllamaService] Returning fallback recommended models due to failure in fetching available models' '[OllamaService] Returning fallback recommended models due to failure in fetching available models'
) )
return FALLBACK_RECOMMENDED_OLLAMA_MODELS return {
models: FALLBACK_RECOMMENDED_OLLAMA_MODELS,
hasMore: false
}
} }
if (!recommendedOnly) { if (!recommendedOnly) {
const filteredModels = query ? this.fuseSearchModels(models, query) : models const filteredModels = query ? this.fuseSearchModels(models, query) : models
return filteredModels return {
models: filteredModels.slice(0, limit || 15),
hasMore: filteredModels.length > (limit || 15)
}
} }
// If recommendedOnly is true, only return the first three models (if sorted by pulls, these will be the top 3) // If recommendedOnly is true, only return the first three models (if sorted by pulls, these will be the top 3)
@@ -217,10 +224,17 @@ export class OllamaService {
}) })
if (query) { if (query) {
return this.fuseSearchModels(recommendedModels, query) const filteredRecommendedModels = this.fuseSearchModels(recommendedModels, query)
return {
models: filteredRecommendedModels,
hasMore: filteredRecommendedModels.length > (limit || 15)
}
} }
return recommendedModels return {
models: recommendedModels,
hasMore: recommendedModels.length > (limit || 15)
}
} catch (error) { } catch (error) {
logger.error( logger.error(
`[OllamaService] Failed to get available models: ${error instanceof Error ? error.message : error}` `[OllamaService] Failed to get available models: ${error instanceof Error ? error.message : error}`
@@ -253,7 +267,7 @@ export class OllamaService {
} }
const rawModels = response.data.models as NomadOllamaModel[] const rawModels = response.data.models as NomadOllamaModel[]
// Filter out tags where cloud is truthy, then remove models with no remaining tags // Filter out tags where cloud is truthy, then remove models with no remaining tags
const noCloud = rawModels const noCloud = rawModels
.map((model) => ({ .map((model) => ({

View File

@@ -18,5 +18,6 @@ export const getAvailableModelsSchema = vine.compile(
sort: vine.enum(['pulls', 'name'] as const).optional(), sort: vine.enum(['pulls', 'name'] as const).optional(),
recommendedOnly: vine.boolean().optional(), recommendedOnly: vine.boolean().optional(),
query: vine.string().trim().optional(), query: vine.string().trim().optional(),
limit: vine.number().positive().optional(),
}) })
) )

View File

@@ -196,10 +196,13 @@ class API {
})() })()
} }
async getAvailableModels(query: string | null, recommendedOnly: boolean): Promise<NomadOllamaModel[] | undefined> { async getAvailableModels(params: { query?: string; recommendedOnly?: boolean; limit?: number }) {
return catchInternal(async () => { return catchInternal(async () => {
const response = await this.client.get<NomadOllamaModel[]>('/ollama/models', { const response = await this.client.get<{
params: { sort: 'pulls', recommendedOnly, query }, models: NomadOllamaModel[]
hasMore: boolean
}>('/ollama/models', {
params: { sort: 'pulls', ...params },
}) })
return response.data return response.data
})() })()
@@ -506,7 +509,7 @@ class API {
// For 409 Conflict errors, throw a specific error that the UI can handle // For 409 Conflict errors, throw a specific error that the UI can handle
if (error.response?.status === 409) { if (error.response?.status === 409) {
const err = new Error(error.response?.data?.error || 'This benchmark has already been submitted to the repository') const err = new Error(error.response?.data?.error || 'This benchmark has already been submitted to the repository')
;(err as any).status = 409 ; (err as any).status = 409
throw err throw err
} }
// For other errors, extract the message and throw // For other errors, extract the message and throw

View File

@@ -152,7 +152,13 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const { data: recommendedModels, isLoading: isLoadingRecommendedModels } = useQuery({ const { data: recommendedModels, isLoading: isLoadingRecommendedModels } = useQuery({
queryKey: ['recommended-ollama-models'], queryKey: ['recommended-ollama-models'],
queryFn: () => api.getAvailableModels(null, true), queryFn: async () => {
const res = await api.getAvailableModels({ recommendedOnly: true })
if (!res) {
return []
}
return res.models
},
refetchOnWindowFocus: false, refetchOnWindowFocus: false,
}) })
@@ -736,7 +742,7 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
className={classNames( className={classNames(
'relative', 'relative',
selectedMapCollections.includes(collection.slug) && selectedMapCollections.includes(collection.slug) &&
'ring-4 ring-desert-green rounded-lg', 'ring-4 ring-desert-green rounded-lg',
collection.all_installed && 'opacity-75', collection.all_installed && 'opacity-75',
!isOnline && 'opacity-50 cursor-not-allowed' !isOnline && 'opacity-50 cursor-not-allowed'
)} )}
@@ -760,7 +766,7 @@ export default function EasySetupWizard(props: { system: { services: ServiceSlim
const renderStep3 = () => { const renderStep3 = () => {
// Check if AI or Information capabilities are selected OR already installed // Check if AI or Information capabilities are selected OR already installed
const isAiSelected = selectedServices.includes(SERVICE_NAMES.OLLAMA) || const isAiSelected = selectedServices.includes(SERVICE_NAMES.OLLAMA) ||
installedServices.some((s) => s.service_name === SERVICE_NAMES.OLLAMA) installedServices.some((s) => s.service_name === SERVICE_NAMES.OLLAMA)
const isInformationSelected = selectedServices.includes(SERVICE_NAMES.KIWIX) || const isInformationSelected = selectedServices.includes(SERVICE_NAMES.KIWIX) ||
installedServices.some((s) => s.service_name === SERVICE_NAMES.KIWIX) installedServices.some((s) => s.service_name === SERVICE_NAMES.KIWIX)

View File

@@ -37,21 +37,29 @@ export default function ModelsPage(props: {
const [query, setQuery] = useState('') const [query, setQuery] = useState('')
const [queryUI, setQueryUI] = useState('') const [queryUI, setQueryUI] = useState('')
const [limit, setLimit] = useState(15)
const debouncedSetQuery = debounce((val: string) => { const debouncedSetQuery = debounce((val: string) => {
setQuery(val) setQuery(val)
}, 300) }, 300)
const { data: availableModels, isLoading } = useQuery({ const { data: availableModelData, isFetching } = useQuery({
queryKey: ['ollama', 'availableModels', query], queryKey: ['ollama', 'availableModels', query, limit],
queryFn: async () => { queryFn: async () => {
const res = await api.getAvailableModels(query, false) const res = await api.getAvailableModels({
query,
recommendedOnly: false,
limit,
})
if (!res) { if (!res) {
return [] return {
models: [],
hasMore: false,
}
} }
return res return res
}, },
initialData: props.models.availableModels, initialData: { models: props.models.availableModels, hasMore: false },
}) })
async function handleInstallModel(modelName: string) { async function handleInstallModel(modelName: string) {
@@ -209,8 +217,8 @@ export default function ModelsPage(props: {
title: 'Last Updated', title: 'Last Updated',
}, },
]} ]}
data={availableModels || []} data={availableModelData?.models || []}
loading={isLoading} loading={isFetching}
expandable={{ expandable={{
expandedRowRender: (record) => ( expandedRowRender: (record) => (
<div className="pl-14"> <div className="pl-14">
@@ -283,6 +291,18 @@ export default function ModelsPage(props: {
), ),
}} }}
/> />
<div className="flex justify-center mt-6">
{availableModelData?.hasMore && (
<StyledButton
variant="primary"
onClick={() => {
setLimit((prev) => prev + 15)
}}
>
Load More
</StyledButton>
)}
</div>
</main> </main>
</div> </div>
</SettingsLayout> </SettingsLayout>