fix(ui): reduce SSE reconnect churn and polling overhead on navigation

This commit is contained in:
Jake Turner 2026-03-20 18:38:10 +00:00 committed by Jake Turner
parent b8cf1b6127
commit d645fc161b
6 changed files with 26 additions and 9 deletions

View File

@@ -3,7 +3,7 @@ import { defineConfig } from '@adonisjs/transmit'
 import { redis } from '@adonisjs/transmit/transports'

 export default defineConfig({
-  pingInterval: false,
+  pingInterval: '30s',
   transport: {
     driver: redis({
       host: env.get('REDIS_HOST'),

View File

@@ -40,7 +40,7 @@ createInertiaApp({
     createRoot(el).render(
       <QueryClientProvider client={queryClient}>
         <ThemeProvider>
-          <TransmitProvider baseUrl={window.location.origin} enableLogging={true}>
+          <TransmitProvider baseUrl={window.location.origin} enableLogging={environment === 'development'}>
             <NotificationsProvider>
               <ModalsProvider>
                 <App {...props} />

View File

@@ -17,7 +17,11 @@ const useDownloads = (props: useDownloadsProps) => {
   const queryData = useQuery({
     queryKey: queryKey,
     queryFn: () => api.listDownloadJobs(props.filetype),
-    refetchInterval: 2000, // Refetch every 2 seconds to get updated progress
+    refetchInterval: (query) => {
+      const data = query.state.data
+      // Only poll when there are active downloads; otherwise use a slower interval
+      return data && data.length > 0 ? 2000 : 30000
+    },
     enabled: props.enabled ?? true,
   })

View File

@@ -7,7 +7,11 @@ const useEmbedJobs = (props: { enabled?: boolean } = {}) => {
   const queryData = useQuery({
     queryKey: ['embed-jobs'],
     queryFn: () => api.getActiveEmbedJobs().then((data) => data ?? []),
-    refetchInterval: 2000,
+    refetchInterval: (query) => {
+      const data = query.state.data
+      // Only poll when there are active jobs; otherwise use a slower interval
+      return data && data.length > 0 ? 2000 : 30000
+    },
     enabled: props.enabled ?? true,
   })

View File

@@ -1,4 +1,4 @@
-import { useEffect, useState } from 'react'
+import { useEffect, useRef, useState } from 'react'
 import { useTransmit } from 'react-adonis-transmit'

 export type OllamaModelDownload = {
@@ -10,6 +10,7 @@ export type OllamaModelDownload = {
 export default function useOllamaModelDownloads() {
   const { subscribe } = useTransmit()
   const [downloads, setDownloads] = useState<Map<string, OllamaModelDownload>>(new Map())
+  const timeoutsRef = useRef<Set<ReturnType<typeof setTimeout>>>(new Set())

   useEffect(() => {
     const unsubscribe = subscribe('ollama-model-download', (data: OllamaModelDownload) => {
@@ -19,13 +20,15 @@ export default function useOllamaModelDownloads() {
         if (data.percent >= 100) {
           // If download is complete, keep it for a short time before removing to allow UI to show 100% progress
           updated.set(data.model, data)
-          setTimeout(() => {
+          const timeout = setTimeout(() => {
+            timeoutsRef.current.delete(timeout)
             setDownloads((current) => {
               const next = new Map(current)
               next.delete(data.model)
               return next
             })
           }, 2000)
+          timeoutsRef.current.add(timeout)
         } else {
           updated.set(data.model, data)
         }
@@ -36,7 +39,10 @@ export default function useOllamaModelDownloads() {
     return () => {
       unsubscribe()
+      timeoutsRef.current.forEach(clearTimeout)
+      timeoutsRef.current.clear()
     }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [subscribe])

   const downloadsArray = Array.from(downloads.values())

View File

@@ -1,5 +1,5 @@
 import { Head, Link, usePage } from '@inertiajs/react'
-import { useState, useEffect } from 'react'
+import { useState, useEffect, useRef } from 'react'
 import SettingsLayout from '~/layouts/SettingsLayout'
 import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
 import CircularGauge from '~/components/systeminfo/CircularGauge'
@@ -40,6 +40,7 @@ export default function BenchmarkPage(props: {
   const aiInstalled = useServiceInstalledStatus(SERVICE_NAMES.OLLAMA)
   const [progress, setProgress] = useState<BenchmarkProgressWithID | null>(null)
   const [isRunning, setIsRunning] = useState(props.benchmark.status !== 'idle')
+  const refetchLatestRef = useRef(refetchLatest)
   const [showDetails, setShowDetails] = useState(false)
   const [showHistory, setShowHistory] = useState(false)
   const [showAIRequiredAlert, setShowAIRequiredAlert] = useState(false)
@@ -60,6 +61,7 @@ export default function BenchmarkPage(props: {
     },
     initialData: props.benchmark.latestResult,
   })
+  refetchLatestRef.current = refetchLatest

   // Fetch all benchmark results for history
   const { data: benchmarkHistory } = useQuery({
@@ -306,14 +308,15 @@ export default function BenchmarkPage(props: {
       setProgress(data)
       if (data.status === 'completed' || data.status === 'error') {
         setIsRunning(false)
-        refetchLatest()
+        refetchLatestRef.current()
       }
     })
     return () => {
       unsubscribe()
     }
-  }, [subscribe, refetchLatest])
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [subscribe])

   const formatBytes = (bytes: number) => {
     const gb = bytes / (1024 * 1024 * 1024)