test: add llm service basic test
rpidanny committed Jul 4, 2024
1 parent 329394b commit 9306b87
Showing 3 changed files with 79 additions and 4 deletions.
8 changes: 5 additions & 3 deletions jest.config.ts
@@ -43,13 +43,15 @@ const config: JestConfigWithTsJest = {
     '<rootDir>/coverage/',
     '<rootDir>/src/commands/',
     '\\.config\\.ts$',
+    '<rootDir>/src/services/chat/autonomous-agent.ts',
+    '<rootDir>/src/utils/ui/output.ts',
   ],
   coverageThreshold: {
     global: {
-      statements: 81,
+      statements: 95,
       branches: 90,
-      functions: 90,
-      lines: 81,
+      functions: 94,
+      lines: 95,
     },
   },
 }
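For quick reference, the coverage-related portion of jest.config.ts after this hunk is applied looks roughly like the excerpt below. The coveragePathIgnorePatterns key name is an assumption (it sits above the visible hunk), and only values visible in the diff are reproduced.

```typescript
import type { JestConfigWithTsJest } from 'ts-jest'

// Excerpt of the post-commit config; keys outside the visible hunk are omitted,
// and coveragePathIgnorePatterns is assumed to be the array the new entries land in.
const config: JestConfigWithTsJest = {
  coveragePathIgnorePatterns: [
    '<rootDir>/coverage/',
    '<rootDir>/src/commands/',
    '\\.config\\.ts$',
    '<rootDir>/src/services/chat/autonomous-agent.ts',
    '<rootDir>/src/utils/ui/output.ts',
  ],
  coverageThreshold: {
    global: {
      statements: 95,
      branches: 90,
      functions: 94,
      lines: 95,
    },
  },
}

export default config
```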
71 changes: 71 additions & 0 deletions src/services/llm/llm.service.spec.ts
@@ -0,0 +1,71 @@
+import { jest } from '@jest/globals'
+import { BaseLanguageModel } from '@langchain/core/language_models/base'
+import { mock } from 'jest-mock-extended'
+
+import { LLMService } from './llm.service'
+
+describe('LLMService', () => {
+  const mockBaseLanguageModel = mock<BaseLanguageModel>()
+
+  let llmService: LLMService
+
+  beforeEach(() => {
+    llmService = new LLMService(
+      mock<BaseLanguageModel>({
+        pipe: () => mockBaseLanguageModel,
+      }),
+    )
+  })
+
+  afterEach(() => {
+    jest.clearAllMocks()
+    jest.resetAllMocks()
+  })
+
+  describe('summarize', () => {
+    it('should call llm once for short text', async () => {
+      const inputText = 'input text'
+      mockBaseLanguageModel.invoke.mockResolvedValue('summary')
+
+      await expect(llmService.summarize(inputText)).resolves.toEqual('summary')
+
+      expect(mockBaseLanguageModel.invoke).toHaveBeenCalledTimes(1)
+    })
+
+    it('should call llm n times for longer text', async () => {
+      const inputText = 'input text'.repeat(10_000)
+      mockBaseLanguageModel.invoke.mockResolvedValue('summary')
+
+      await expect(llmService.summarize(inputText)).resolves.toEqual('summary')
+
+      // 2 calls for each chunk and 1 call for final summary
+      expect(mockBaseLanguageModel.invoke).toHaveBeenCalledTimes(3)
+    })
+  })
+
+  describe('ask', () => {
+    it('should call llm once', async () => {
+      const inputText = 'input text'
+      const question = 'question'
+
+      mockBaseLanguageModel.invoke.mockResolvedValue('answer')
+      mockBaseLanguageModel.getNumTokens.mockResolvedValue(3)
+
+      await expect(llmService.ask(inputText, question)).resolves.toEqual('answer')
+
+      expect(mockBaseLanguageModel.invoke).toHaveBeenCalledTimes(11)
+    })
+
+    it('should call llm n times for longer text', async () => {
+      const inputText = 'input text'.repeat(10_000)
+      const question = 'question'
+
+      mockBaseLanguageModel.invoke.mockResolvedValue('answer')
+      mockBaseLanguageModel.getNumTokens.mockResolvedValue(3)
+
+      await expect(llmService.ask(inputText, question)).resolves.toEqual('answer')
+
+      expect(mockBaseLanguageModel.invoke).toHaveBeenCalledTimes(31)
+    })
+  })
+})
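As a rough usage sketch of the service these tests exercise (the concrete model class and its configuration are assumptions, not part of this commit), LLMService would be wired up along these lines:

```typescript
import { ChatOpenAI } from '@langchain/openai'

import { LLMService } from './llm.service'

// Hypothetical wiring; any BaseLanguageModel can be passed in, ChatOpenAI is only illustrative.
async function main(): Promise<void> {
  const llmService = new LLMService(new ChatOpenAI({ temperature: 0 }))

  // Long inputs are split into chunks and summarized over multiple LLM calls,
  // which is what the call-count assertions in the spec above exercise.
  const summary = await llmService.summarize('...some long document text...')

  // Question answering goes through the map-reduce QA chain set up in llm.service.ts.
  const answer = await llmService.ask('...some long document text...', 'What is the main finding?')

  console.log({ summary, answer })
}

main().catch(console.error)
```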
4 changes: 3 additions & 1 deletion src/services/llm/llm.service.ts
@@ -38,7 +38,9 @@ export class LLMService {
       refinePrompt: SUMMARY_REFINE_PROMPT,
     })

-    this.qaChain = loadQAMapReduceChain(llm)
+    this.qaChain = loadQAMapReduceChain(llm, {
+      verbose: false,
+    })
   }

   public async summarize(inputText: string) {
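The hunk above only shows the chain being constructed. As a hedged sketch of how such a map-reduce QA chain is typically driven (the splitter, chunk size, and result handling are assumptions, not code from this repository), a standalone equivalent of ask() could look like this:

```typescript
import { BaseLanguageModel } from '@langchain/core/language_models/base'
import { loadQAMapReduceChain } from 'langchain/chains'
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter'

// Hypothetical standalone equivalent of LLMService.ask; splitter settings are illustrative only.
export async function askWithMapReduce(
  llm: BaseLanguageModel,
  inputText: string,
  question: string,
): Promise<string> {
  const qaChain = loadQAMapReduceChain(llm, { verbose: false })

  // Chunk the raw text so the map step can run over each piece before the final reduce.
  const splitter = new RecursiveCharacterTextSplitter({ chunkSize: 10_000, chunkOverlap: 500 })
  const docs = await splitter.createDocuments([inputText])

  // Legacy QA chains take the chunked documents plus the question as a single input object.
  const result = await qaChain.call({ input_documents: docs, question })

  return result.text
}
```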
