diff --git a/content/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx b/content/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx
index a886a698173f..765d5a84bba2 100644
--- a/content/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx
+++ b/content/docs/07-reference/03-ai-sdk-rsc/01-stream-ui.mdx
@@ -428,22 +428,27 @@ To see `streamUI` in action, check out [these examples](#examples).
                   'Detailed information about the input (prompt) tokens. See also: cached tokens and non-cached tokens.',
                 properties: [
                   {
-                    name: 'noCacheTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of non-cached input (prompt) tokens used.',
-                  },
-                  {
-                    name: 'cacheReadTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of cached input (prompt) tokens read.',
-                  },
-                  {
-                    name: 'cacheWriteTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of cached input (prompt) tokens written.',
+                    type: 'LanguageModelInputTokenDetails',
+                    parameters: [
+                      {
+                        name: 'noCacheTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of non-cached input (prompt) tokens used.',
+                      },
+                      {
+                        name: 'cacheReadTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of cached input (prompt) tokens read.',
+                      },
+                      {
+                        name: 'cacheWriteTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of cached input (prompt) tokens written.',
+                      },
+                    ],
                   },
                 ],
               },
@@ -459,14 +464,19 @@ To see `streamUI` in action, check out [these examples](#examples).
                   'Detailed information about the output (completion) tokens.',
                 properties: [
                   {
-                    name: 'textTokens',
-                    type: 'number | undefined',
-                    description: 'The number of text tokens used.',
-                  },
-                  {
-                    name: 'reasoningTokens',
-                    type: 'number | undefined',
-                    description: 'The number of reasoning tokens used.',
+                    type: 'LanguageModelOutputTokenDetails',
+                    parameters: [
+                      {
+                        name: 'textTokens',
+                        type: 'number | undefined',
+                        description: 'The number of text tokens used.',
+                      },
+                      {
+                        name: 'reasoningTokens',
+                        type: 'number | undefined',
+                        description: 'The number of reasoning tokens used.',
+                      },
+                    ],
                   },
                 ],
               },
@@ -654,22 +664,27 @@ To see `streamUI` in action, check out [these examples](#examples).
                   'Detailed information about the input (prompt) tokens. See also: cached tokens and non-cached tokens.',
                 properties: [
                   {
-                    name: 'noCacheTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of non-cached input (prompt) tokens used.',
-                  },
-                  {
-                    name: 'cacheReadTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of cached input (prompt) tokens read.',
-                  },
-                  {
-                    name: 'cacheWriteTokens',
-                    type: 'number | undefined',
-                    description:
-                      'The number of cached input (prompt) tokens written.',
+                    type: 'LanguageModelInputTokenDetails',
+                    parameters: [
+                      {
+                        name: 'noCacheTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of non-cached input (prompt) tokens used.',
+                      },
+                      {
+                        name: 'cacheReadTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of cached input (prompt) tokens read.',
+                      },
+                      {
+                        name: 'cacheWriteTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of cached input (prompt) tokens written.',
+                      },
+                    ],
                   },
                 ],
               },
@@ -686,14 +701,20 @@ To see `streamUI` in action, check out [these examples](#examples).
                   'Detailed information about the output (completion) tokens.',
                 properties: [
                   {
-                    name: 'textTokens',
-                    type: 'number | undefined',
-                    description: 'The number of text tokens used.',
-                  },
-                  {
-                    name: 'reasoningTokens',
-                    type: 'number | undefined',
-                    description: 'The number of reasoning tokens used.',
+                    type: 'LanguageModelOutputTokenDetails',
+                    parameters: [
+                      {
+                        name: 'textTokens',
+                        type: 'number | undefined',
+                        description: 'The number of text tokens used.',
+                      },
+                      {
+                        name: 'reasoningTokens',
+                        type: 'number | undefined',
+                        description:
+                          'The number of reasoning tokens used.',
+                      },
+                    ],
                   },
                 ],
               },
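For reference, the nested detail objects documented in these hunks correspond roughly to the shapes sketched below. The field names and `number | undefined` types are taken from the diff itself; modelling them as standalone TypeScript aliases and reading them off a usage object (for example inside an `onFinish` callback) are illustrative assumptions, not code from the SDK.

```ts
// Sketch of the token-detail shapes documented above. Field names and types
// come from the diff; the standalone aliases and the usage wrapper are assumptions.
type LanguageModelInputTokenDetails = {
  noCacheTokens: number | undefined; // non-cached input (prompt) tokens used
  cacheReadTokens: number | undefined; // cached input (prompt) tokens read
  cacheWriteTokens: number | undefined; // cached input (prompt) tokens written
};

type LanguageModelOutputTokenDetails = {
  textTokens: number | undefined; // text tokens in the completion
  reasoningTokens: number | undefined; // reasoning tokens in the completion
};

// Hypothetical usage object exposing both detail groups.
type UsageSketch = {
  inputTokenDetails?: LanguageModelInputTokenDetails;
  outputTokenDetails?: LanguageModelOutputTokenDetails;
};

// Illustrative read of the documented fields, e.g. from an onFinish callback.
function logTokenDetails(usage: UsageSketch): void {
  console.log('cache reads:', usage.inputTokenDetails?.cacheReadTokens);
  console.log('reasoning tokens:', usage.outputTokenDetails?.reasoningTokens);
}
```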