feat(AI): chat suggestions and assistant settings

This commit is contained in:
Jake Turner 2026-02-01 07:24:21 +00:00
parent 029c2176f7
commit 1923cd4cde
24 changed files with 460 additions and 43 deletions

View File

@ -2,11 +2,22 @@ import { inject } from '@adonisjs/core'
import type { HttpContext } from '@adonisjs/core/http'
import { ChatService } from '#services/chat_service'
import { createSessionSchema, updateSessionSchema, addMessageSchema } from '#validators/chat'
import { parseBoolean } from '../utils/misc.js'
import KVStore from '#models/kv_store'
@inject()
export default class ChatsController {
constructor(private chatService: ChatService) {}
/**
 * Renders the chat Inertia page, passing along whether AI chat
 * suggestions are enabled (read from the KV store and coerced to boolean).
 */
async inertia({ inertia }: HttpContext) {
  const suggestionsFlag = await KVStore.getValue('chat.suggestionsEnabled')
  const settings = {
    chatSuggestionsEnabled: parseBoolean(suggestionsFlag),
  }
  return inertia.render('chat', { settings })
}
// Returns every chat session via the chat service (no pagination).
async index({}: HttpContext) {
return await this.chatService.getAllSessions()
}
@ -34,6 +45,17 @@ export default class ChatsController {
}
}
/**
 * GET /api/chat/suggestions — returns AI-generated conversation starters.
 * Responds 200 with `{ suggestions }`, or 500 with an error message
 * when generation fails.
 */
async suggestions({ response }: HttpContext) {
  try {
    return response.status(200).json({
      suggestions: await this.chatService.getChatSuggestions(),
    })
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Failed to get suggestions'
    return response.status(500).json({ error: message })
  }
}
async update({ params, request, response }: HttpContext) {
try {
const sessionId = parseInt(params.id)

View File

@ -1,9 +1,12 @@
import KVStore from '#models/kv_store';
import { BenchmarkService } from '#services/benchmark_service';
import { MapService } from '#services/map_service';
import { OllamaService } from '#services/ollama_service';
import { SystemService } from '#services/system_service';
import { updateSettingSchema } from '#validators/settings';
import { inject } from '@adonisjs/core';
import type { HttpContext } from '@adonisjs/core/http'
import { SETTINGS_KEYS } from '../../constants/kv_store.js';
import { parseBoolean } from '../utils/misc.js';
@inject()
export default class SettingsController {
@ -50,10 +53,14 @@ export default class SettingsController {
async models({ inertia }: HttpContext) {
const availableModels = await this.ollamaService.getAvailableModels({ sort: 'pulls', recommendedOnly: false });
const installedModels = await this.ollamaService.getModels();
const chatSuggestionsEnabled = await KVStore.getValue('chat.suggestionsEnabled')
return inertia.render('settings/models', {
models: {
availableModels: availableModels || [],
installedModels: installedModels || []
installedModels: installedModels || [],
settings: {
chatSuggestionsEnabled: parseBoolean(chatSuggestionsEnabled)
}
}
});
}
@ -88,4 +95,16 @@ export default class SettingsController {
}
});
}
/**
 * GET handler returning a single KV-store setting as `{ key, value }`.
 *
 * Validates the `key` query parameter against the allow-list of known
 * settings keys so callers cannot probe arbitrary rows in the KV store.
 * Responds 400 for a missing/unknown key, 200 otherwise (value is null
 * when the setting has never been written).
 */
async getSetting({ request, response }: HttpContext) {
  const key = request.qs().key;
  // Only expose keys explicitly registered as settings (mirrors the
  // vine.enum(SETTINGS_KEYS) check updateSetting performs on writes).
  if (!key || !SETTINGS_KEYS.includes(key)) {
    return response.status(400).send({ error: 'Unknown setting key' });
  }
  const value = await KVStore.getValue(key);
  return response.status(200).send({ key, value });
}
/**
 * PATCH handler: validates the payload against updateSettingSchema and
 * persists the key/value pair through the system service.
 */
async updateSetting({ request, response }: HttpContext) {
  const { key, value } = await request.validateUsing(updateSettingSchema);
  await this.systemService.updateSetting(key, value);
  return response
    .status(200)
    .send({ success: true, message: 'Setting updated successfully' });
}
}

View File

@ -7,6 +7,7 @@ import type { KVStoreKey, KVStoreValue } from '../../types/kv_store.js'
* that don't necessitate their own dedicated models.
*/
export default class KVStore extends BaseModel {
static table = 'kv_store'
static namingStrategy = new SnakeCaseNamingStrategy()
@column({ isPrimary: true })
@ -29,7 +30,13 @@ export default class KVStore extends BaseModel {
*/
/**
 * Looks up a KV-store row by key and returns its value as a string,
 * or null when the key is absent or holds no value.
 */
static async getValue(key: KVStoreKey): Promise<KVStoreValue> {
  const row = await this.findBy('key', key)
  const raw = row?.value
  if (raw === undefined || raw === null) {
    return null
  }
  // Defensive stringification in case a non-string value was persisted.
  return typeof raw === 'string' ? raw : String(raw)
}
/**

View File

@ -5,6 +5,8 @@ import { DateTime } from 'luxon'
import { inject } from '@adonisjs/core'
import { OllamaService } from './ollama_service.js'
import { ChatRequest } from 'ollama'
import { SYSTEM_PROMPTS } from '../../constants/ollama.js'
import { toTitleCase } from '../utils/misc.js'
@inject()
export class ChatService {
@ -37,9 +39,75 @@ export class ChatService {
}
}
/**
 * Generates up to 3 AI-powered conversation starter suggestions.
 *
 * Picks the largest installed model (larger models tend to give better
 * responses), asks it for a comma-separated list, then normalizes the
 * output into at most 3 title-cased strings. Returns an empty array when
 * no models are installed or on any failure.
 */
async getChatSuggestions() {
  try {
    const models = await this.ollamaService.getModels()
    // Guard the empty case explicitly: Array.reduce with no initial
    // value throws a TypeError on an empty array.
    if (!models || models.length === 0) {
      return []
    }
    // Larger models generally give "better" responses, so pick the largest one
    const largestModel = models.reduce((prev, current) =>
      prev.size > current.size ? prev : current
    )
    const response = await this.ollamaService.chat({
      model: largestModel.name,
      messages: [
        {
          role: 'user',
          content: SYSTEM_PROMPTS.chat_suggestions,
        },
      ],
      stream: false,
    })
    const content = response?.message?.content?.trim()
    if (!content) {
      return []
    }
    // The prompt asks for a comma-separated list, but models sometimes
    // return newline-separated output instead.
    const parts = content.includes(',') ? content.split(',') : content.split(/\r?\n/)
    const suggestions = parts
      .map((s) => s.trim())
      // Strip numbered list markers (1., 2., ...) and bullet points —
      // applied to both formats, since models may decorate either.
      .map((s) => s.replace(/^\d+\.\s*/, '').replace(/^[-*•]\s*/, ''))
      // Remove surrounding quotes if present
      .map((s) => s.replace(/^["']|["']$/g, ''))
      // Filter out empty strings and limit to 3 suggestions
      .filter((s) => s.length > 0)
      .slice(0, 3)
    return suggestions.map((s) => toTitleCase(s))
  } catch (error) {
    logger.error(
      `[ChatService] Failed to get chat suggestions: ${
        error instanceof Error ? error.message : error
      }`
    )
    return []
  }
}
async getSession(sessionId: number) {
try {
console.log('Fetching session with ID:', sessionId);
const session = await ChatSession.query().where('id', sessionId).preload('messages').first()
if (!session) {

View File

@ -10,6 +10,8 @@ import path, { join } from 'path'
import { getAllFilesystems, getFile } from '../utils/fs.js'
import axios from 'axios'
import env from '#start/env'
import KVStore from '#models/kv_store'
import { KVStoreKey } from '../../types/kv_store.js'
@inject()
export class SystemService {
@ -254,6 +256,10 @@ export class SystemService {
}
}
/**
 * Persists a single setting to the KV store.
 * The key is constrained to known KVStoreKey values; the value is
 * handed to KVStore.setValue as-is.
 */
async updateSetting(key: KVStoreKey, value: any): Promise<void> {
await KVStore.setValue(key, value);
}
/**
* Checks the current state of Docker containers against the database records and updates the database accordingly.
* It will mark services as not installed if their corresponding containers do not exist, regardless of their running state.

View File

@ -3,3 +3,23 @@ export function formatSpeed(bytesPerSecond: number): string {
if (bytesPerSecond < 1024 * 1024) return `${(bytesPerSecond / 1024).toFixed(1)} KB/s`
return `${(bytesPerSecond / (1024 * 1024)).toFixed(1)} MB/s`
}
/**
 * Converts a string to title case: the input is lower-cased, then the
 * first character of every space-separated word is upper-cased.
 */
export function toTitleCase(str: string): string {
  const titled: string[] = []
  for (const word of str.toLowerCase().split(' ')) {
    titled.push(word.charAt(0).toUpperCase() + word.slice(1))
  }
  return titled.join(' ')
}
/**
 * Coerces a loosely-typed setting value to a boolean.
 * Truthy forms: true, 'true'/'1' (case-insensitive), the number 1.
 * Every other value — including other numbers/strings, null, undefined —
 * is false.
 */
export function parseBoolean(value: any): boolean {
  switch (typeof value) {
    case 'boolean':
      return value
    case 'string':
      return ['true', '1'].includes(value.toLowerCase())
    case 'number':
      return value === 1
    default:
      return false
  }
}

View File

@ -0,0 +1,8 @@
import vine from "@vinejs/vine";
import { SETTINGS_KEYS } from "../../constants/kv_store.js";
// Validates PATCH /system/settings payloads: key must be one of the
// registered settings keys; value is intentionally left untyped since
// settings hold heterogeneous values.
export const updateSettingSchema = vine.compile(vine.object({
key: vine.enum(SETTINGS_KEYS),
value: vine.any(),
}))

View File

@ -0,0 +1,3 @@
import { KVStoreKey } from "../types/kv_store.js";
// Allow-list of KV-store keys that may be read/written through the
// settings API (used by the updateSetting validator).
export const SETTINGS_KEYS: KVStoreKey[] = ['chat.suggestionsEnabled'];

View File

@ -72,5 +72,29 @@ You have access to the following relevant information from the knowledge base. U
${context}
If the user's question is related to this context, incorporate it into your response. Otherwise, respond normally.
`,
chat_suggestions: `
You are a helpful assistant that generates conversation starter suggestions for a survivalist/prepper using an AI assistant.
Provide exactly 3 conversation starter topics as direct questions that someone would ask.
These should be clear, complete questions that can start meaningful conversations.
Examples of good suggestions:
- "How do I purify water in an emergency?"
- "What are the best foods for long-term storage?"
- "Help me create a 72-hour emergency kit"
Do NOT use:
- Follow-up questions seeking clarification
- Vague or incomplete suggestions
- Questions that assume prior context
- Statements that are not suggestions themselves, such as praise for asking the question
- Direct questions or commands to the user
Return ONLY the 3 suggestions as a comma-separated list with no additional text, formatting, numbering, or quotation marks.
The suggestions should be in title case.
Ensure that your suggestions are comma-seperated with no conjunctions like "and" or "or".
Do not use line breaks, new lines, or extra spacing to separate the suggestions.
Format: suggestion1, suggestion2, suggestion3
`,
}

View File

@ -0,0 +1,29 @@
import clsx from 'clsx'
interface BouncingDotsProps {
text: string
containerClassName?: string
textClassName?: string
}
/**
 * Small "typing" indicator: a text label followed by three dots that
 * bounce with staggered animation delays.
 */
export default function BouncingDots({ text, containerClassName, textClassName }: BouncingDotsProps) {
  // Stagger each dot's bounce by 150ms so they ripple left-to-right.
  const delays = ['0ms', '150ms', '300ms']
  return (
    <div className={clsx('flex items-center justify-center gap-2', containerClassName)}>
      <span className={clsx('text-gray-600', textClassName)}>{text}</span>
      <span className="flex gap-1 mt-1">
        {delays.map((delay) => (
          <span
            key={delay}
            className="w-1.5 h-1.5 bg-gray-600 rounded-full animate-bounce"
            style={{ animationDelay: delay }}
          />
        ))}
      </span>
    </div>
  )
}

View File

@ -4,17 +4,22 @@ import classNames from '~/lib/classNames'
import { ChatMessage } from '../../../types/chat'
import ChatMessageBubble from './ChatMessageBubble'
import ChatAssistantAvatar from './ChatAssistantAvatar'
import BouncingDots from '../BouncingDots'
// Props for the main chat panel: message history, send handler, and
// optional AI-generated conversation-starter suggestions.
interface ChatInterfaceProps {
// Conversation history rendered in the scroll area.
messages: ChatMessage[]
// Invoked with the composed text when the user sends a message.
onSendMessage: (message: string) => void
// True while the assistant is generating a reply.
isLoading?: boolean
// Conversation starters shown on the empty state; clicking one fills the input.
chatSuggestions?: string[]
// True while suggestions are being fetched (shows the bouncing-dots indicator).
chatSuggestionsLoading?: boolean
}
export default function ChatInterface({
messages,
onSendMessage,
isLoading = false,
chatSuggestions = [],
chatSuggestionsLoading = false,
}: ChatInterfaceProps) {
const [input, setInput] = useState('')
const messagesEndRef = useRef<HTMLDivElement>(null)
@ -54,7 +59,7 @@ export default function ChatInterface({
}
return (
<div className="flex-1 flex flex-col min-h-0 bg-white">
<div className="flex-1 flex flex-col min-h-0 bg-white shadow-sm">
<div className="flex-1 overflow-y-auto px-6 py-4 space-y-6">
{messages.length === 0 ? (
<div className="h-full flex items-center justify-center">
@ -64,6 +69,30 @@ export default function ChatInterface({
<p className="text-gray-500 text-sm">
Interact with your installed language models directly in the Command Center.
</p>
{chatSuggestions && chatSuggestions.length > 0 && !chatSuggestionsLoading && (
<div className="mt-8">
<h4 className="text-sm font-medium text-gray-600 mb-2">Suggestions:</h4>
<div className="flex flex-col gap-2">
{chatSuggestions.map((suggestion, index) => (
<button
key={index}
onClick={() => {
setInput(suggestion)
// Focus the textarea after setting input
setTimeout(() => {
textareaRef.current?.focus()
}, 0)
}}
className="px-4 py-2 bg-gray-100 hover:bg-gray-200 rounded-lg text-sm text-gray-700 transition-colors"
>
{suggestion}
</button>
))}
</div>
</div>
)}
{/* Display bouncing dots while loading suggestions */}
{chatSuggestionsLoading && <BouncingDots text="Thinking" containerClassName="mt-8" />}
</div>
</div>
) : (
@ -85,23 +114,7 @@ export default function ChatInterface({
<div className="flex gap-4 justify-start">
<ChatAssistantAvatar />
<div className="max-w-[70%] rounded-lg px-4 py-3 bg-gray-100 text-gray-800">
<div className="flex items-center gap-2">
<span className="text-gray-600">Thinking</span>
<span className="flex gap-1 mt-1">
<span
className="w-1.5 h-1.5 bg-gray-600 rounded-full animate-bounce"
style={{ animationDelay: '0ms' }}
/>
<span
className="w-1.5 h-1.5 bg-gray-600 rounded-full animate-bounce"
style={{ animationDelay: '150ms' }}
/>
<span
className="w-1.5 h-1.5 bg-gray-600 rounded-full animate-bounce"
style={{ animationDelay: '300ms' }}
/>
</span>
</div>
<BouncingDots text="Thinking" />
</div>
</div>
)}

View File

@ -1,5 +1,7 @@
import { Dialog, DialogBackdrop, DialogPanel } from '@headlessui/react'
import Chat from './index'
import { useSystemSetting } from '~/hooks/useSystemSetting'
import { parseBoolean } from '../../../app/utils/misc'
interface ChatModalProps {
open: boolean
@ -7,6 +9,10 @@ interface ChatModalProps {
}
export default function ChatModal({ open, onClose }: ChatModalProps) {
const settings = useSystemSetting({
key: "chat.suggestionsEnabled"
})
return (
<Dialog open={open} onClose={onClose} className="relative z-50">
<DialogBackdrop
@ -19,7 +25,7 @@ export default function ChatModal({ open, onClose }: ChatModalProps) {
transition
className="relative bg-white rounded-xl shadow-2xl w-full max-w-7xl h-[85vh] flex overflow-hidden transition-all data-[closed]:scale-95 data-[closed]:opacity-0 data-[enter]:duration-300 data-[leave]:duration-200 data-[enter]:ease-out data-[leave]:ease-in"
>
<Chat enabled={open} isInModal onClose={onClose} />
<Chat enabled={open} isInModal onClose={onClose} suggestionsEnabled={parseBoolean(settings.data?.value)} />
</DialogPanel>
</div>
</Dialog>

View File

@ -97,7 +97,7 @@ export default function ChatSidebar({
size="sm"
fullWidth
>
Models
Models & Settings
</StyledButton>
<StyledButton
onClick={() => {

View File

@ -14,9 +14,15 @@ interface ChatProps {
enabled: boolean
isInModal?: boolean
onClose?: () => void
suggestionsEnabled?: boolean
}
export default function Chat({ enabled, isInModal, onClose }: ChatProps) {
export default function Chat({
enabled,
isInModal,
onClose,
suggestionsEnabled = false,
}: ChatProps) {
const queryClient = useQueryClient()
const { openModal, closeAllModals } = useModals()
const [activeSessionId, setActiveSessionId] = useState<string | null>(null)
@ -47,6 +53,15 @@ export default function Chat({ enabled, isInModal, onClose }: ChatProps) {
select: (data) => data || [],
})
const { data: chatSuggestions, isLoading: chatSuggestionsLoading } = useQuery<string[]>({
queryKey: ['chatSuggestions'],
queryFn: async () => {
const res = await api.getChatSuggestions()
return res ?? []
},
enabled: suggestionsEnabled,
})
const deleteAllSessionsMutation = useMutation({
mutationFn: () => api.deleteAllChatSessions(),
onSuccess: () => {
@ -263,6 +278,8 @@ export default function Chat({ enabled, isInModal, onClose }: ChatProps) {
messages={messages}
onSendMessage={handleSendMessage}
isLoading={chatMutation.isPending}
chatSuggestions={chatSuggestions}
chatSuggestionsLoading={chatSuggestionsLoading}
/>
</div>
</div>

View File

@ -0,0 +1,63 @@
import clsx from 'clsx'
// Props for the controlled toggle-switch input.
interface SwitchProps {
// Current on/off state (controlled by the parent).
checked: boolean
// Called with the next state when the user toggles the switch.
onChange: (checked: boolean) => void
// Optional label rendered beside the switch.
label?: string
// Optional helper text rendered under the label.
description?: string
// When true, the switch ignores clicks and renders dimmed.
disabled?: boolean
// Explicit element id; defaults to one derived from the label.
id?: string
}
/**
 * Accessible controlled toggle switch with an optional label and
 * description. Exposes role="switch" / aria-checked for assistive tech.
 */
export default function Switch({
  checked,
  onChange,
  label,
  description,
  disabled = false,
  id,
}: SwitchProps) {
  // Derive an id from the label when none is given so the <label>'s
  // htmlFor can target the button.
  const switchId = id || `switch-${label?.replace(/\s+/g, '-').toLowerCase()}`
  const hasText = Boolean(label || description)

  const toggle = () => {
    if (!disabled) onChange(!checked)
  }

  // Track (pill) styling: green when on, grey when off, dimmed when disabled.
  const trackClasses = clsx(
    'relative inline-flex h-6 w-11 flex-shrink-0 cursor-pointer rounded-full border-2 border-transparent',
    'transition-colors duration-200 ease-in-out focus:outline-none focus:ring-2 focus:ring-desert-green focus:ring-offset-2',
    checked ? 'bg-desert-green' : 'bg-gray-200',
    disabled ? 'opacity-50 cursor-not-allowed' : ''
  )
  // Knob styling: slides right when checked.
  const knobClasses = clsx(
    'pointer-events-none inline-block h-5 w-5 transform rounded-full bg-white shadow ring-0',
    'transition duration-200 ease-in-out',
    checked ? 'translate-x-5' : 'translate-x-0'
  )

  return (
    <div className="flex items-center justify-between py-2">
      {hasText && (
        <div className="flex-1">
          {label && (
            <label
              htmlFor={switchId}
              className="text-base font-medium text-gray-900 cursor-pointer"
            >
              {label}
            </label>
          )}
          {description && <p className="text-sm text-gray-500 mt-1">{description}</p>}
        </div>
      )}
      <div className="flex items-center ml-4">
        <button
          id={switchId}
          type="button"
          role="switch"
          aria-checked={checked}
          disabled={disabled}
          onClick={toggle}
          className={trackClasses}
        >
          <span className={knobClasses} />
        </button>
      </div>
    </div>
  )
}

View File

@ -5,7 +5,7 @@ import api from '~/lib/api'
const useServiceInstalledStatus = (serviceName: string) => {
const { data, isFetching } = useQuery<ServiceSlim[] | undefined>({
queryKey: ['installed-services'],
queryFn: () => api.listServices(),
queryFn: () => api.getSystemServices(),
})
const isInstalled = data?.some(

View File

@ -0,0 +1,22 @@
import { useQuery, UseQueryOptions } from '@tanstack/react-query'
import api from '~/lib/api'
import { KVStoreKey } from '../../types/kv_store';
// Options for useSystemSetting: all react-query options except
// queryKey/queryFn (which the hook supplies itself), plus the KV key to read.
export type UseSystemSettingProps = Omit<
UseQueryOptions<{ key: string; value: any } | undefined>,
'queryKey' | 'queryFn'
> & {
key: KVStoreKey
}
/**
 * React-query hook that fetches a single system setting from the backend.
 * Results are cached per key under ['system-setting', key].
 */
export const useSystemSetting = ({ key, ...queryOptions }: UseSystemSettingProps) => {
  return useQuery<{ key: string; value: any } | undefined>({
    ...queryOptions,
    queryKey: ['system-setting', key],
    queryFn: () => api.getSetting(key),
  })
}

View File

@ -2,19 +2,19 @@ import {
IconArrowBigUpLines,
IconChartBar,
IconDashboard,
IconDatabaseStar,
IconFolder,
IconGavel,
IconMapRoute,
IconSettings,
IconTerminal2,
IconWand,
IconZoom
} from '@tabler/icons-react'
import StyledSidebar from '~/components/StyledSidebar'
import { getServiceLink } from '~/lib/navigation'
const navigation = [
{ name: 'AI Model Manager', href: '/settings/models', icon: IconDatabaseStar, current: false },
{ name: 'AI Assistant', href: '/settings/models', icon: IconWand, current: false },
{ name: 'Apps', href: '/settings/apps', icon: IconTerminal2, current: false },
{ name: 'Benchmark', href: '/settings/benchmark', icon: IconChartBar, current: false },
{ name: 'Content Explorer', href: '/settings/zim/remote-explorer', icon: IconZoom, current: false },

View File

@ -135,6 +135,15 @@ class API {
})()
}
/**
 * Fetches AI-generated conversation starters from GET /chat/suggestions.
 */
async getChatSuggestions() {
  return catchInternal(async () => {
    const { data } = await this.client.get<{ suggestions: string[] }>('/chat/suggestions')
    return data.suggestions
  })()
}
async getInternetStatus() {
return catchInternal(async () => {
const response = await this.client.get<boolean>('/system/internet-status')
@ -167,14 +176,14 @@ class API {
async getBenchmarkResults() {
return catchInternal(async () => {
const response = await this.client.get<BenchmarkResult[]>('/benchmark/results')
const response = await this.client.get<{ results: BenchmarkResult[], total: number}>('/benchmark/results')
return response.data
})()
}
async getLatestBenchmarkResult() {
return catchInternal(async () => {
const response = await this.client.get<BenchmarkResult>('/benchmark/results/latest')
const response = await this.client.get<{ result: BenchmarkResult | null}>('/benchmark/results/latest')
return response.data
})()
}
@ -484,6 +493,26 @@ class API {
return response.data
})()
}
/**
 * Reads a single system setting by key via GET /system/settings?key=...
 * Resolves to `{ key, value }`.
 */
async getSetting(key: string) {
  return catchInternal(async () => {
    const response = await this.client.get<{ key: string; value: any }>('/system/settings', {
      params: { key },
    })
    return response.data
  })()
}
/**
 * Persists a system setting via PATCH /system/settings.
 * Resolves to the backend's `{ success, message }` acknowledgement.
 */
async updateSetting(key: string, value: any) {
  return catchInternal(async () => {
    const { data } = await this.client.patch<{ success: boolean; message: string }>(
      '/system/settings',
      { key, value }
    )
    return data
  })()
}
}
export default new API()

View File

@ -1,11 +1,11 @@
import { Head } from '@inertiajs/react'
import ChatComponent from '~/components/chat'
export default function Chat() {
export default function Chat(props: { settings: { chatSuggestionsEnabled: boolean } }) {
return (
<div className="w-full h-full">
<Head title="AI Assistant" />
<ChatComponent enabled={true} />
<ChatComponent enabled={true} suggestionsEnabled={props.settings.chatSuggestionsEnabled} />
</div>
)
}

View File

@ -51,7 +51,10 @@ export default function BenchmarkPage(props: {
queryKey: ['benchmark', 'latest'],
queryFn: async () => {
const res = await api.getLatestBenchmarkResult()
return res ?? null
if (res && res.result) {
return res.result
}
return null
},
initialData: props.benchmark.latestResult,
})
@ -61,7 +64,10 @@ export default function BenchmarkPage(props: {
queryKey: ['benchmark', 'history'],
queryFn: async () => {
const res = await api.getBenchmarkResults()
return res ?? []
if (res && res.results && Array.isArray(res.results)) {
return res.results
}
return []
},
})
@ -121,7 +127,7 @@ export default function BenchmarkPage(props: {
const updateBuilderTag = useMutation({
mutationFn: async ({
benchmarkId,
builderTag
builderTag,
}: {
benchmarkId: string
builderTag: string
@ -149,7 +155,11 @@ export default function BenchmarkPage(props: {
// First, save the current builder tag to the benchmark (don't refetch yet)
if (currentBuilderTag && !anonymous) {
await updateBuilderTag.mutateAsync({ benchmarkId, builderTag: currentBuilderTag, invalidate: false })
await updateBuilderTag.mutateAsync({
benchmarkId,
builderTag: currentBuilderTag,
invalidate: false,
})
}
const res = await api.submitBenchmark(benchmarkId, anonymous)

View File

@ -1,4 +1,5 @@
import { Head, router } from '@inertiajs/react'
import { useState } from 'react'
import StyledTable from '~/components/StyledTable'
import SettingsLayout from '~/layouts/SettingsLayout'
import { NomadOllamaModel } from '../../../types/ollama'
@ -11,13 +12,23 @@ import { useModals } from '~/context/ModalContext'
import StyledModal from '~/components/StyledModal'
import { ModelResponse } from 'ollama'
import { SERVICE_NAMES } from '../../../constants/service_names'
import Switch from '~/components/inputs/Switch'
import StyledSectionHeader from '~/components/StyledSectionHeader'
import { useMutation } from '@tanstack/react-query'
export default function ModelsPage(props: {
models: { availableModels: NomadOllamaModel[]; installedModels: ModelResponse[] }
models: {
availableModels: NomadOllamaModel[]
installedModels: ModelResponse[]
settings: { chatSuggestionsEnabled: boolean }
}
}) {
const { isInstalled } = useServiceInstalledStatus(SERVICE_NAMES.OLLAMA)
const { addNotification } = useNotifications()
const { openModal, closeAllModals } = useModals()
const [chatSuggestionsEnabled, setChatSuggestionsEnabled] = useState(
props.models.settings.chatSuggestionsEnabled
)
async function handleInstallModel(modelName: string) {
try {
@ -79,15 +90,35 @@ export default function ModelsPage(props: {
)
}
const updateSettingMutation = useMutation({
mutationFn: async ({ key, value }: { key: string; value: boolean }) => {
return await api.updateSetting(key, value)
},
onSuccess: () => {
addNotification({
message: 'Setting updated successfully.',
type: 'success',
})
},
onError: (error) => {
console.error('Error updating setting:', error)
addNotification({
message: 'There was an error updating the setting. Please try again.',
type: 'error',
})
},
})
return (
<SettingsLayout>
<Head title="AI Model Manager | Project N.O.M.A.D." />
<Head title="AI Assistant Settings | Project N.O.M.A.D." />
<div className="xl:pl-72 w-full">
<main className="px-12 py-6">
<h1 className="text-4xl font-semibold mb-4">AI Model Manager</h1>
<h1 className="text-4xl font-semibold mb-4">AI Assistant</h1>
<p className="text-gray-500 mb-4">
Easily manage the AI models available for AI Assistant. We recommend starting with smaller
models first to see how they perform on your system before moving on to larger ones.
Easily manage the AI Assistant's settings and installed models. We recommend starting
with smaller models first to see how they perform on your system before moving on to
larger ones.
</p>
{!isInstalled && (
<Alert
@ -97,8 +128,24 @@ export default function ModelsPage(props: {
className="!mt-6"
/>
)}
<StyledSectionHeader title="Settings" className="mt-8 mb-4" />
<div className="bg-white rounded-lg border-2 border-gray-200 p-6">
<div className="space-y-4">
<Switch
checked={chatSuggestionsEnabled}
onChange={(newVal) => {
setChatSuggestionsEnabled(newVal)
updateSettingMutation.mutate({ key: 'chat.suggestionsEnabled', value: newVal })
}}
label="Chat Suggestions"
description="Display AI-generated conversation starters in the chat interface"
/>
</div>
</div>
<StyledSectionHeader title="Models" className="mt-12 mb-4" />
<StyledTable<NomadOllamaModel>
className="font-semibold mt-8"
className="font-semibold"
rowLines={true}
columns={[
{

View File

@ -26,7 +26,7 @@ transmit.registerRoutes()
router.get('/', [HomeController, 'index'])
router.get('/home', [HomeController, 'home'])
router.on('/about').renderInertia('about')
router.on('/chat').renderInertia('chat')
router.get('/chat', [ChatsController, 'inertia'])
router.on('/knowledge-base').renderInertia('knowledge-base')
router.get('/maps', [MapsController, 'index'])
@ -113,6 +113,8 @@ router
})
.prefix('/api/chat/sessions')
router.get('/api/chat/suggestions', [ChatsController, 'suggestions'])
router
.group(() => {
router.post('/upload', [RagController, 'upload'])
@ -133,6 +135,8 @@ router
router.post('/update', [SystemController, 'requestSystemUpdate'])
router.get('/update/status', [SystemController, 'getSystemUpdateStatus'])
router.get('/update/logs', [SystemController, 'getSystemUpdateLogs'])
router.get('/settings', [SettingsController, 'getSetting'])
router.patch('/settings', [SettingsController, 'updateSetting'])
})
.prefix('/api/system')

View File

@ -1,3 +1,3 @@
export type KVStoreKey = ''
// Union of all keys that may be stored in the kv_store table.
export type KVStoreKey = 'chat.suggestionsEnabled'
// Values are persisted as strings; null means "not set".
export type KVStoreValue = string | null