diff --git a/.changeset/hot-spoons-worry.md b/.changeset/hot-spoons-worry.md new file mode 100644 index 0000000..bec16c5 --- /dev/null +++ b/.changeset/hot-spoons-worry.md @@ -0,0 +1,5 @@ +--- +'@callstack/byorg-core': minor +--- + +core: expose `ChatModel.name` property diff --git a/packages/core/src/ai/types.ts b/packages/core/src/ai/types.ts index 9d4fb02..9c545f6 100644 --- a/packages/core/src/ai/types.ts +++ b/packages/core/src/ai/types.ts @@ -16,5 +16,6 @@ export type ModelUsage = { }; export interface ChatModel { + name: string; generateResponse(context: RequestContext): Promise<AssistantResponse>; } diff --git a/packages/core/src/ai/vercel.ts b/packages/core/src/ai/vercel.ts index ae25555..9f128c7 100644 --- a/packages/core/src/ai/vercel.ts +++ b/packages/core/src/ai/vercel.ts @@ -29,7 +29,7 @@ const VERCEL_AI_SHARED_OPTIONS = { }, }; -export type VercelChatModelAdapterOptions = { +export type VercelChatModelAdapterConfig = { languageModel: LanguageModel; maxTokens?: number; maxSteps?: number; @@ -51,7 +51,15 @@ type AiExecutionResult = { }; export class VercelChatModelAdapter implements ChatModel { - constructor(private readonly _options: VercelChatModelAdapterOptions) {} + config: VercelChatModelAdapterConfig; + + constructor(config: VercelChatModelAdapterConfig) { + this.config = config; + } + + get name(): string { + return this.config.languageModel.modelId; + } async generateResponse(context: RequestContext): Promise<AssistantResponse> { let systemPrompt = context.systemPrompt(); @@ -123,10 +131,10 @@ export class VercelChatModelAdapter { const startTime = performance.now(); const result = await streamText({ ...VERCEL_AI_SHARED_OPTIONS, - model: this._options.languageModel, + model: this.config.languageModel, + maxTokens: this.config.maxTokens, + maxSteps: this.config.maxSteps, messages: context.messages, - maxTokens: this._options.maxTokens, - maxSteps: this._options.maxSteps, tools: context.tools, }); @@ -156,10 +164,10 @@ export class VercelChatModelAdapter 
implements ChatModel { const startTime = performance.now(); const result = await generateText({ ...VERCEL_AI_SHARED_OPTIONS, - model: this._options.languageModel, + model: this.config.languageModel, + maxTokens: this.config.maxTokens, + maxSteps: this.config.maxSteps, messages: context.messages, - maxTokens: this._options.maxTokens, - maxSteps: this._options.maxSteps, tools: context.tools, }); const responseTime = performance.now() - startTime; diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 15b1ebf..5076523 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -23,7 +23,7 @@ export { createApp } from './application.js'; export type { Middleware, NextFunction } from './middleware.js'; export type { AssistantResponse, ChatModel, ModelUsage } from './ai/types.js'; -export type { VercelChatModelAdapterOptions } from './ai/vercel.js'; +export type { VercelChatModelAdapterConfig } from './ai/vercel.js'; export { VercelChatModelAdapter } from './ai/vercel.js'; export type { Command, CommandsPluginConfig } from './plugins/commands.js'; diff --git a/packages/core/src/mock/mock-model.ts b/packages/core/src/mock/mock-model.ts index 9493301..f0185c0 100644 --- a/packages/core/src/mock/mock-model.ts +++ b/packages/core/src/mock/mock-model.ts @@ -34,6 +34,7 @@ export function createMockChatModel(config?: MockChatModelConfig): MockChatModel let lastRandom = config?.seed ?? Date.now(); return { calls, + name: 'mock', generateResponse: async (context: RequestContext): Promise<AssistantResponse> => { calls.push([context]); lastRandom = random(lastRandom);