diff --git a/jest.config.ts b/jest.config.ts
index 8f6d912..d74d8cc 100644
--- a/jest.config.ts
+++ b/jest.config.ts
@@ -48,7 +48,7 @@ const config: JestConfigWithTsJest = {
     global: {
       statements: 81,
       branches: 90,
-      functions: 92,
+      functions: 90,
       lines: 81,
     },
   },
diff --git a/src/commands/chat/index.ts b/src/commands/chat/index.ts
index 8ef415a..e18f6ac 100644
--- a/src/commands/chat/index.ts
+++ b/src/commands/chat/index.ts
@@ -69,9 +69,7 @@ export default class Chat extends BaseCommand {
 
     initChatContainer(
       {
-        headless: false,
         concurrency,
-        summarize: false,
         llmProvider,
         skipCaptcha,
         legacyProcessing,
diff --git a/src/commands/config/set.ts b/src/commands/config/set.ts
index 9fdd773..fb18103 100644
--- a/src/commands/config/set.ts
+++ b/src/commands/config/set.ts
@@ -23,7 +23,7 @@ export default class SetConfig extends Command {
     const openai = await uiInput.promptOpenAIConfig(existingConfig?.openai)
     const ollama = await uiInput.promptOllamaConfig(existingConfig?.ollama)
 
-    const config = {
+    const config: TConfig = {
       openai,
       ollama,
     }
diff --git a/src/commands/download/papers.ts b/src/commands/download/papers.ts
index 7be5457..6ce79bd 100644
--- a/src/commands/download/papers.ts
+++ b/src/commands/download/papers.ts
@@ -52,6 +52,7 @@ export default class DownloadPapers extends BaseCommand {
     const { headless } = this.flags
 
     initDownloadContainer({ headless }, this.logger)
+
     this.odysseus = Container.get(Odysseus)
     await this.odysseus.init()
 
diff --git a/src/containers/chat.container.ts b/src/containers/chat.container.ts
index b8edb04..577ba68 100644
--- a/src/containers/chat.container.ts
+++ b/src/containers/chat.container.ts
@@ -8,9 +8,7 @@ import { initSearchContainer } from './search.container.js'
 
 export function initChatContainer(
   opts: {
-    headless: boolean
     concurrency: number
-    summarize: boolean
     llmProvider: LLMProvider
     skipCaptcha: boolean
     legacyProcessing: boolean
@@ -18,7 +16,15 @@ export function initChatContainer(
   config: TConfig,
   logger: Quill,
 ) {
-  initSearchContainer(opts, config, logger)
+  initSearchContainer(
+    {
+      ...opts,
+      headless: false,
+      summarize: false,
+    },
+    config,
+    logger,
+  )
 
   Container.set(ChatOpenAI, Container.get(LLMFactory).getLLM(LLMProvider.OpenAI, config))
 }