diff --git a/admin/app/services/chat_service.ts b/admin/app/services/chat_service.ts
index 3bfb5d0..d395e53 100644
--- a/admin/app/services/chat_service.ts
+++ b/admin/app/services/chat_service.ts
@@ -32,7 +32,7 @@ export class ChatService {
   async getChatSuggestions() {
     try {
       const models = await this.ollamaService.getModels()
-      if (!models) {
+      if (!models || models.length === 0) {
         return [] // If no models are available, return empty suggestions
       }
diff --git a/admin/app/services/docker_service.ts b/admin/app/services/docker_service.ts
index 96a4ffb..7ba1e2e 100644
--- a/admin/app/services/docker_service.ts
+++ b/admin/app/services/docker_service.ts
@@ -545,8 +545,8 @@ export class DockerService {
 
     // If Ollama was just installed, trigger Nomad docs discovery and embedding
     if (service.service_name === SERVICE_NAMES.OLLAMA) {
-      logger.info('[DockerService] Ollama installation complete. Enabling chat suggestions by default.')
-      await KVStore.setValue('chat.suggestionsEnabled', "true")
+      logger.info('[DockerService] Ollama installation complete. Default behavior is to not enable chat suggestions.')
+      await KVStore.setValue('chat.suggestionsEnabled', "false")
       logger.info('[DockerService] Ollama installation complete. Triggering Nomad docs discovery...')
diff --git a/admin/inertia/components/chat/ChatInterface.tsx b/admin/inertia/components/chat/ChatInterface.tsx
index 60acf32..27d44c1 100644
--- a/admin/inertia/components/chat/ChatInterface.tsx
+++ b/admin/inertia/components/chat/ChatInterface.tsx
@@ -11,6 +11,7 @@ interface ChatInterfaceProps {
   onSendMessage: (message: string) => void
   isLoading?: boolean
   chatSuggestions?: string[]
+  chatSuggestionsEnabled?: boolean
   chatSuggestionsLoading?: boolean
 }
 
@@ -19,6 +20,7 @@ export default function ChatInterface({
   onSendMessage,
   isLoading = false,
   chatSuggestions = [],
+  chatSuggestionsEnabled = false,
   chatSuggestionsLoading = false,
 }: ChatInterfaceProps) {
   const [input, setInput] = useState('')
@@ -69,7 +71,7 @@ export default function ChatInterface({
           Interact with your installed language models directly in the Command Center.
-        {chatSuggestions && chatSuggestions.length > 0 && !chatSuggestionsLoading && (
+        {chatSuggestionsEnabled && chatSuggestions && chatSuggestions.length > 0 && !chatSuggestionsLoading && (