From 21a4346bd1505c77a48caf48c8be510318ff04b6 Mon Sep 17 00:00:00 2001 From: tomsun28 Date: Fri, 10 Oct 2025 17:02:16 +0800 Subject: [PATCH] chore: add stream tools example --- examples/basic_usage.py | 16 ++++---- examples/stream_tools.py | 81 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+), 8 deletions(-) create mode 100644 examples/stream_tools.py diff --git a/examples/basic_usage.py b/examples/basic_usage.py index 3a65216..2cac3cf 100644 --- a/examples/basic_usage.py +++ b/examples/basic_usage.py @@ -6,9 +6,9 @@ def completion(): # Create chat completion response = client.chat.completions.create( - model='glm-4', + model='glm-4.6', messages=[{'role': 'user', 'content': 'Hello, Z.ai!'}], - temperature=0.7, + temperature=1.0, ) print(response.choices[0].message.content) @@ -19,7 +19,7 @@ def completion_with_stream(): # Create chat completion response = client.chat.completions.create( - model='glm-4', + model='glm-4.6', messages=[ {'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'Tell me a story about AI.'}, @@ -38,7 +38,7 @@ def completion_with_websearch(): # Create chat completion response = client.chat.completions.create( - model='glm-4', + model='glm-4.6', messages=[ {'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'What is artificial intelligence?'}, @@ -52,7 +52,7 @@ def completion_with_websearch(): }, } ], - temperature=0.5, + temperature=1.0, max_tokens=2000, ) @@ -234,10 +234,10 @@ def ofZhipu(): print(response.choices[0].message.content) if __name__ == '__main__': - # completion() - # completion_with_stream() + completion() + completion_with_stream() # completion_with_websearch() - multi_modal_chat() + # multi_modal_chat() # role_play() # assistant_conversation() # video_generation() diff --git a/examples/stream_tools.py b/examples/stream_tools.py new file mode 100644 index 0000000..a479bc1 --- /dev/null +++ b/examples/stream_tools.py @@ -0,0 
from zai import ZhipuAiClient


def main():
    """Stream a chat completion that can trigger tool calls.

    Prints three kinds of streamed output as they arrive:
    reasoning text, answer text, and tool-call fragments, then
    prints the fully reassembled tool calls at the end.
    """
    client = ZhipuAiClient()
    # Create a chat completion with tool calls and streaming enabled.
    response = client.chat.completions.create(
        model="glm-4.6",
        messages=[
            {"role": "user", "content": "How is the weather in Beijing and Shanghai? Please provide the answer in Celsius."},
        ],
        tools=[
            {
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "description": "Get the weather information for a specific location",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {"type": "string", "description": "City, eg: Beijing, Shanghai"},
                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                        },
                        "required": ["location"],
                    },
                },
            }
        ],
        stream=True,       # enable streaming
        tool_stream=True,  # enable tool call streaming
    )

    # Accumulators for the streamed pieces.
    reasoning_content = ""   # accumulated reasoning text
    content = ""             # accumulated answer text
    final_tool_calls = {}    # tool-call index -> accumulated tool call object
    reasoning_started = False  # header printed for reasoning section
    content_started = False    # header printed for answer section

    # Process the streaming response chunk by chunk.
    for chunk in response:
        if not chunk.choices:
            continue

        delta = chunk.choices[0].delta

        # Streamed reasoning output.
        if hasattr(delta, 'reasoning_content') and delta.reasoning_content:
            if not reasoning_started and delta.reasoning_content.strip():
                print("\n🧠 Thinking: ")
                reasoning_started = True
            reasoning_content += delta.reasoning_content
            print(delta.reasoning_content, end="", flush=True)

        # Streamed answer content.
        if hasattr(delta, 'content') and delta.content:
            if not content_started and delta.content.strip():
                print("\n\n💬 Answer: ")
                content_started = True
            content += delta.content
            print(delta.content, end="", flush=True)

        # Streamed tool-call fragments. Guarded with getattr for
        # consistency with the hasattr checks above, so a delta without
        # the attribute cannot raise AttributeError.
        if getattr(delta, 'tool_calls', None):
            for tool_call in delta.tool_calls:
                index = tool_call.index
                if index not in final_tool_calls:
                    # First fragment for this index: keep the call object
                    # and normalize arguments so later `+=` never sees None.
                    final_tool_calls[index] = tool_call
                    final_tool_calls[index].function.arguments = tool_call.function.arguments or ""
                else:
                    # Subsequent fragment: append the streamed argument text.
                    final_tool_calls[index].function.arguments += tool_call.function.arguments or ""

    # Output the fully reassembled tool-call info, if any were triggered.
    if final_tool_calls:
        print("\n📋 Function Calls Triggered:")
        for index, tool_call in final_tool_calls.items():
            print(f"  {index}: Function Name: {tool_call.function.name}, Params: {tool_call.function.arguments}")


if __name__ == "__main__":
    main()