diff --git a/.changeset/shy-cherries-cross.md b/.changeset/shy-cherries-cross.md
new file mode 100644
index 000000000000..8888cc8b523c
--- /dev/null
+++ b/.changeset/shy-cherries-cross.md
@@ -0,0 +1,5 @@
+---
+'@ai-sdk/huggingface': patch
+---
+
+Implement function_call_output conversion for client side tool execution
diff --git a/examples/ai-core/src/generate-text/huggingface-tool-bug.ts b/examples/ai-core/src/generate-text/huggingface-tool-bug.ts
new file mode 100644
index 000000000000..cb33f58a58cc
--- /dev/null
+++ b/examples/ai-core/src/generate-text/huggingface-tool-bug.ts
@@ -0,0 +1,34 @@
+import { huggingface } from '@ai-sdk/huggingface';
+import { stepCountIs, streamText, tool } from 'ai';
+import z from 'zod';
+import 'dotenv/config';
+
+async function main() {
+  const { textStream } = streamText({
+    model: huggingface('deepseek-ai/DeepSeek-V3-0324'),
+    prompt: 'What is the weather in Montevideo, Uruguay?',
+    stopWhen: stepCountIs(5),
+    tools: {
+      weather: tool({
+        description: 'Get the weather in a location',
+        inputSchema: z.object({
+          location: z.string().describe('The location to get the weather for'),
+        }),
+        execute: async ({ location }) => ({
+          location,
+          temperature: 72 + Math.floor(Math.random() * 21) - 10,
+        }),
+      }),
+    },
+    onError({ error }) {
+      console.error(error);
+    },
+  });
+
+  for await (const textPart of textStream) {
+    process.stdout.write(textPart);
+  }
+  console.log('\nDone');
+}
+
+main().catch(console.error);
diff --git a/packages/huggingface/src/responses/convert-to-huggingface-responses-messages.ts b/packages/huggingface/src/responses/convert-to-huggingface-responses-messages.ts
index 4b60d4c52168..829d069a55bc 100644
--- a/packages/huggingface/src/responses/convert-to-huggingface-responses-messages.ts
+++ b/packages/huggingface/src/responses/convert-to-huggingface-responses-messages.ts
@@ -4,6 +4,12 @@ import {
   UnsupportedFunctionalityError,
 } from '@ai-sdk/provider';
 
+export type HuggingFaceResponsesFunctionCallOutput = {
+  type: 'function_call_output';
+  call_id: string;
+  output: string;
+};
+
 export async function convertToHuggingFaceResponsesMessages({
   prompt,
 }: {
@@ -96,7 +102,35 @@ export async function convertToHuggingFaceResponsesMessages({
       }
 
       case 'tool': {
-        warnings.push({ type: 'unsupported', feature: 'tool messages' });
+        for (const part of content) {
+          const output = part.output;
+
+          let contentValue: string;
+          switch (output.type) {
+            case 'text':
+            case 'error-text':
+              contentValue = output.value;
+              break;
+            case 'json':
+            case 'error-json':
+              contentValue = JSON.stringify(output.value);
+              break;
+            case 'execution-denied':
+              contentValue = output.reason ?? 'Tool execution denied.';
+              break;
+            case 'content':
+              contentValue = JSON.stringify(output.value);
+              break;
+          }
+
+          const functionCallOutput: HuggingFaceResponsesFunctionCallOutput = {
+            type: 'function_call_output',
+            call_id: part.toolCallId,
+            output: contentValue,
+          };
+
+          messages.push(functionCallOutput);
+        }
 
         break;
       }
diff --git a/packages/huggingface/src/responses/huggingface-responses-language-model.test.ts b/packages/huggingface/src/responses/huggingface-responses-language-model.test.ts
index 3401331599aa..2b329b9999b0 100644
--- a/packages/huggingface/src/responses/huggingface-responses-language-model.test.ts
+++ b/packages/huggingface/src/responses/huggingface-responses-language-model.test.ts
@@ -777,7 +777,7 @@ describe('HuggingFaceResponsesLanguageModel', () => {
       expect(warnings).toMatchInlineSnapshot(`[]`);
     });
 
-    it('should warn about tool messages', async () => {
+    it('should not warn about tool messages', async () => {
       const { warnings } = await createModel(
         'deepseek-ai/DeepSeek-V3-0324',
       ).doGenerate({
@@ -796,14 +796,57 @@ describe('HuggingFaceResponsesLanguageModel', () => {
         ],
       });
 
-      expect(warnings).toMatchInlineSnapshot(`
-        [
+      expect(warnings).toMatchInlineSnapshot(`[]`);
+    });
+  });
+
+  it('should convert tool messages to function_call_output in request body', async () => {
+    server.urls['https://router.huggingface.co/v1/responses'].response = {
+      type: 'json-value',
+      body: {
+        id: 'resp_test',
+        model: 'deepseek-ai/DeepSeek-V3-0324',
+        object: 'response',
+        created_at: 1741257730,
+        status: 'completed',
+        error: null,
+        tools: [],
+        temperature: 1.0,
+        top_p: 1.0,
+        usage: { input_tokens: 10, output_tokens: 10, total_tokens: 20 },
+        output: [
           {
-            "feature": "tool messages",
-            "type": "unsupported",
+            id: 'msg_response',
+            type: 'message',
+            role: 'assistant',
+            status: 'completed',
+            content: [{ type: 'output_text', text: 'Got it!' }],
           },
-        ]
-      `);
+        ],
+      },
+    };
+
+    await createModel('deepseek-ai/DeepSeek-V3-0324').doGenerate({
+      prompt: [
+        {
+          role: 'tool',
+          content: [
+            {
+              type: 'tool-result',
+              toolCallId: 'call_123',
+              toolName: 'test',
+              output: { type: 'text', value: 'test result' },
+            },
+          ],
+        },
+      ],
+    });
+
+    const requestBody = await server.calls[0].requestBodyJson;
+    expect(requestBody.input).toContainEqual({
+      type: 'function_call_output',
+      call_id: 'call_123',
+      output: 'test result',
     });
   });
 