mirror of
https://github.com/Crosstalk-Solutions/project-nomad.git
synced 2026-03-28 03:29:25 +01:00
39 lines · 1.6 KiB · TypeScript
import { test } from '@japa/runner'
import { ChatService } from '../../../app/services/chat_service.js'
import { OllamaService } from '../../../app/services/ollama_service.js'
test.group('Services | ChatService (Mocked)', () => {
|
|
test('getChatSuggestions: should return an empty array if there are no models in Ollama', async ({ assert }) => {
|
|
// Mock OllamaService returning zero models
|
|
const mockOllamaService = {
|
|
getModels: async () => []
|
|
} as unknown as OllamaService
|
|
|
|
const chatService = new ChatService(mockOllamaService)
|
|
const suggestions = await chatService.getChatSuggestions()
|
|
|
|
assert.deepEqual(suggestions, [])
|
|
})
|
|
|
|
test('getChatSuggestions: should extract, clean and format suggestions generated by the LLM', async ({ assert }) => {
|
|
// Simulate a scenario where Ollama responds to the request
|
|
const mockOllamaService = {
|
|
getModels: async () => [{ name: 'llama3:8b', size: 4000000000 }],
|
|
chat: async () => ({
|
|
message: {
|
|
// Simulating dirty LLM output with list markers
|
|
content: '1. How to create a game in Python\n2. What is Docker?\n3. Artisanal burger recipe'
|
|
}
|
|
})
|
|
} as unknown as OllamaService
|
|
|
|
const chatService = new ChatService(mockOllamaService)
|
|
const suggestions = await chatService.getChatSuggestions()
|
|
|
|
// Verify regex and toTitleCase utility logic
|
|
assert.lengthOf(suggestions, 3)
|
|
assert.equal(suggestions[0], 'How To Create A Game In Python')
|
|
assert.equal(suggestions[1], 'What Is Docker?')
|
|
assert.equal(suggestions[2], 'Artisanal Burger Recipe')
|
|
})
|
|
}) |