16 changes: 8 additions & 8 deletions examples/basic_usage.py
@@ -6,9 +6,9 @@ def completion():
 
     # Create chat completion
     response = client.chat.completions.create(
-        model='glm-4',
+        model='glm-4.6',
         messages=[{'role': 'user', 'content': 'Hello, Z.ai!'}],
-        temperature=0.7,
+        temperature=1.0,
     )
     print(response.choices[0].message.content)
 
@@ -19,7 +19,7 @@ def completion_with_stream():
 
     # Create chat completion
     response = client.chat.completions.create(
-        model='glm-4',
+        model='glm-4.6',
         messages=[
             {'role': 'system', 'content': 'You are a helpful assistant.'},
             {'role': 'user', 'content': 'Tell me a story about AI.'},
@@ -38,7 +38,7 @@ def completion_with_websearch():
 
     # Create chat completion
     response = client.chat.completions.create(
-        model='glm-4',
+        model='glm-4.6',
         messages=[
             {'role': 'system', 'content': 'You are a helpful assistant.'},
             {'role': 'user', 'content': 'What is artificial intelligence?'},
@@ -52,7 +52,7 @@ def completion_with_websearch():
                 },
             }
         ],
-        temperature=0.5,
+        temperature=1.0,
         max_tokens=2000,
     )
 
@@ -234,10 +234,10 @@ def ofZhipu():
     print(response.choices[0].message.content)
 
 if __name__ == '__main__':
-    # completion()
-    # completion_with_stream()
+    completion()
+    completion_with_stream()
     # completion_with_websearch()
-    multi_modal_chat()
+    # multi_modal_chat()
     # role_play()
     # assistant_conversation()
     # video_generation()
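The completion_with_stream() hunk above ends before the loop that consumes the streamed response. A minimal consumption sketch, assuming the streamed chunks expose choices[0].delta.content in the same shape the new examples/stream_tools.py below uses:

from zai import ZhipuAiClient

client = ZhipuAiClient()
stream = client.chat.completions.create(
    model='glm-4.6',
    messages=[
        {'role': 'system', 'content': 'You are a helpful assistant.'},
        {'role': 'user', 'content': 'Tell me a story about AI.'},
    ],
    stream=True,
)
for chunk in stream:
    # Print each incremental piece of text as it arrives.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end='', flush=True)
print()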
81 changes: 81 additions & 0 deletions examples/stream_tools.py
@@ -0,0 +1,81 @@
from zai import ZhipuAiClient

def main():
    client = ZhipuAiClient()
    # create chat completion with tool calls and streaming
    response = client.chat.completions.create(
        model="glm-4.6",
        messages=[
            {"role": "user", "content": "How is the weather in Beijing and Shanghai? Please provide the answer in Celsius."},
        ],
        tools=[
            {
                "type": "function",
                "function": {
                    "name": "get_weather",
                    "description": "Get the weather information for a specific location",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "location": {"type": "string", "description": "City, eg: Beijing, Shanghai"},
                            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}
                        },
                        "required": ["location"]
                    }
                }
            }
        ],
        stream=True,  # enable streaming
        tool_stream=True  # enable tool call streaming
    )

    # init variables to collect streaming data
    reasoning_content = ""  # reasoning content
    content = ""  # response content
    final_tool_calls = {}  # tool call data
    reasoning_started = False  # is reasoning started
    content_started = False  # is content started

    # process streaming response
    for chunk in response:
        if not chunk.choices:
            continue

        delta = chunk.choices[0].delta

        # process streaming reasoning output
        if hasattr(delta, 'reasoning_content') and delta.reasoning_content:
            if not reasoning_started and delta.reasoning_content.strip():
                print("\n🧠 Thinking: ")
                reasoning_started = True
            reasoning_content += delta.reasoning_content
            print(delta.reasoning_content, end="", flush=True)

        # process streaming answer content output
        if hasattr(delta, 'content') and delta.content:
            if not content_started and delta.content.strip():
                print("\n\n💬 Answer: ")
                content_started = True
            content += delta.content
            print(delta.content, end="", flush=True)

        # process streaming tool call info
        if delta.tool_calls:
            for tool_call in delta.tool_calls:
                index = tool_call.index
                if index not in final_tool_calls:
                    # add new tool call
                    final_tool_calls[index] = tool_call
                    final_tool_calls[index].function.arguments = tool_call.function.arguments
                else:
                    # append tool call params by streaming index
                    final_tool_calls[index].function.arguments += tool_call.function.arguments

    # output the final constructed tool call info
    if final_tool_calls:
        print("\n📋 Function Calls Triggered:")
        for index, tool_call in final_tool_calls.items():
            print(f" {index}: Function Name: {tool_call.function.name}, Params: {tool_call.function.arguments}")

if __name__ == "__main__":
    main()
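Once main() has assembled final_tool_calls, the natural next step is to execute the requested functions and hand the results back to the model for a final answer. A rough sketch of that follow-up, assuming the API accepts OpenAI-style tool-result messages (role "tool" plus tool_call_id); get_weather and answer_with_tool_results are stand-in names, not part of this PR:

import json

def get_weather(location, unit="celsius"):
    # Stand-in implementation; a real example would query a weather service here.
    return {"location": location, "temperature": 25, "unit": unit}

def answer_with_tool_results(client, question, final_tool_calls):
    # Execute each assembled tool call and collect tool-result messages.
    tool_messages = []
    for tool_call in final_tool_calls.values():
        args = json.loads(tool_call.function.arguments)
        result = get_weather(**args)
        tool_messages.append({
            "role": "tool",                # assumed OpenAI-style tool-result role
            "tool_call_id": tool_call.id,  # assumed attribute on the streamed tool call
            "content": json.dumps(result),
        })
    # Hand the results back so the model can compose the final answer (non-streaming here).
    follow_up = client.chat.completions.create(
        model="glm-4.6",
        messages=[
            {"role": "user", "content": question},
            *tool_messages,
        ],
    )
    return follow_up.choices[0].message.content

A complete round trip would also replay the assistant message that carried the tool calls ahead of the tool-result messages; that detail is omitted from the sketch.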