Skip to content

Commit 37467c8

Browse files
committed
feat: Update AISuite LLM extension with new configuration and improved error handling
1 parent a0e2f35 commit 37467c8

File tree

4 files changed

+31
-75
lines changed

4 files changed

+31
-75
lines changed

agents/examples/default/property.json

+18-64
Original file line numberDiff line numberDiff line change
@@ -283,34 +283,35 @@
283283
"agora_asr_session_control_file_path": "session_control.conf"
284284
}
285285
},
286+
{
287+
"type": "extension",
288+
"extension_group": "tts",
289+
"addon": "azure_tts",
290+
"name": "tts",
291+
"property": {
292+
"azure_subscription_key": "${env:AZURE_TTS_KEY}",
293+
"azure_subscription_region": "${env:AZURE_TTS_REGION}",
294+
"azure_synthesis_voice_name": "en-US-AndrewMultilingualNeural"
295+
}
296+
},
286297
{
287298
"type": "extension",
288299
"name": "llm",
289-
"addon": "openai_chatgpt_python",
300+
"addon": "aisuite_llm_python",
290301
"extension_group": "chatgpt",
291302
"property": {
292-
"api_key": "${env:OPENAI_API_KEY}",
293303
"base_url": "",
294304
"frequency_penalty": 0.9,
295305
"greeting": "TEN Agent connected. How can I help you today?",
296306
"max_memory_length": 10,
297307
"max_tokens": 512,
298-
"model": "${env:OPENAI_MODEL}",
308+
"model": "openai:gpt-4o-mini",
299309
"prompt": "",
300-
"proxy_url": "${env:OPENAI_PROXY_URL}"
301-
}
302-
},
303-
{
304-
"type": "extension",
305-
"name": "tts",
306-
"addon": "fish_audio_tts",
307-
"extension_group": "tts",
308-
"property": {
309-
"api_key": "${env:FISH_AUDIO_TTS_KEY}",
310-
"model_id": "d8639b5cc95548f5afbcfe22d3ba5ce5",
311-
"optimize_streaming_latency": true,
312-
"request_timeout_seconds": 30,
313-
"base_url": "https://api.fish.audio"
310+
"provider_config": {
311+
"openai": {
312+
"api_key": "${env:OPENAI_API_KEY}"
313+
}
314+
}
314315
}
315316
},
316317
{
@@ -326,15 +327,6 @@
326327
"addon": "message_collector",
327328
"extension_group": "transcriber",
328329
"property": {}
329-
},
330-
{
331-
"type": "extension",
332-
"name": "weatherapi_tool_python",
333-
"addon": "weatherapi_tool_python",
334-
"extension_group": "default",
335-
"property": {
336-
"api_key": "${env:WEATHERAPI_API_KEY|}"
337-
}
338330
}
339331
],
340332
"connections": [
@@ -390,14 +382,6 @@
390382
"extension": "tts"
391383
}
392384
]
393-
},
394-
{
395-
"name": "tool_call",
396-
"dest": [
397-
{
398-
"extension": "weatherapi_tool_python"
399-
}
400-
]
401385
}
402386
],
403387
"data": [
@@ -472,19 +456,6 @@
472456
]
473457
}
474458
]
475-
},
476-
{
477-
"extension": "weatherapi_tool_python",
478-
"cmd": [
479-
{
480-
"name": "tool_register",
481-
"dest": [
482-
{
483-
"extension": "llm"
484-
}
485-
]
486-
}
487-
]
488459
}
489460
]
490461
},
@@ -509,23 +480,6 @@
509480
"subscribe_audio_sample_rate": 24000
510481
}
511482
},
512-
{
513-
"type": "extension",
514-
"name": "v2v",
515-
"addon": "openai_v2v_python",
516-
"extension_group": "llm",
517-
"property": {
518-
"api_key": "${env:OPENAI_REALTIME_API_KEY}",
519-
"temperature": 0.9,
520-
"model": "gpt-4o-realtime-preview-2024-12-17",
521-
"max_tokens": 2048,
522-
"voice": "alloy",
523-
"language": "en-US",
524-
"server_vad": true,
525-
"dump": true,
526-
"max_history": 10
527-
}
528-
},
529483
{
530484
"type": "extension",
531485
"name": "message_collector",

agents/ten_packages/extension/aisuite_llm_python/aisuite_llm_extension.py

+9-6
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
import threading
1919
import re
2020
import aisuite as ai
21+
import traceback
2122

2223
DATA_OUT_TEXT_DATA_PROPERTY_TEXT = "text"
2324
DATA_OUT_TEXT_DATA_PROPERTY_TEXT_END_OF_SEGMENT = "end_of_segment"
@@ -28,7 +29,6 @@ def __init__(self, name: str):
2829
super().__init__(name)
2930
self.history = []
3031
self.provider_config = {}
31-
self.api_key = ""
3232
self.model = ""
3333
self.prompt = ""
3434
self.max_tokens = 512
@@ -44,6 +44,7 @@ def __init__(self, name: str):
4444
self.mutex = threading.Lock()
4545

4646
self.client = None
47+
self.ten_env = None
4748

4849
def on_msg(self, role: str, content: str) -> None:
4950
self.mutex.acquire()
@@ -133,7 +134,7 @@ def callback(text: str, end_of_segment: bool):
133134
callback(total, True) # callback once until full answer returned
134135

135136
def stream_chat(self, ts: datetime.time, messages: List[Any], callback):
136-
ten = self.ten
137+
ten = self.ten_env
137138
ten.log_info(f"before stream_chat call {messages} {ts}")
138139

139140
if self.need_interrupt(ts):
@@ -186,11 +187,13 @@ def stream_chat(self, ts: datetime.time, messages: List[Any], callback):
186187

187188
def on_start(self, ten: TenEnv) -> None:
188189
ten.log_info("on_start")
189-
self.provider_config = ten.get_property_string("provider_config")
190+
self.provider_config = ten.get_property_to_json("provider_config")
190191
self.model = ten.get_property_string("model")
191192
self.prompt = ten.get_property_string("prompt")
192193
self.max_history = ten.get_property_int("max_memory_length")
193194
self.max_tokens = ten.get_property_int("max_tokens")
195+
self.ten_env = ten
196+
ten.log_info(f"provider_config {self.provider_config}")
194197
self.client = ai.Client(json.loads(self.provider_config))
195198
greeting = ten.get_property_string("greeting")
196199

@@ -242,7 +245,7 @@ def on_data(self, ten: TenEnv, data: Data) -> None:
242245
return
243246

244247
ts = datetime.now()
245-
ten.log_info("on data %s, %s", input_text, ts)
248+
ten.log_info(f"on data {input_text}, {ts}")
246249
self.queue.put((input_text, ts))
247250

248251
def async_handle(self, ten: TenEnv):
@@ -262,7 +265,7 @@ def async_handle(self, ten: TenEnv):
262265
ten.log_info(f"fetched from queue {chat_input.get_name()}")
263266
self.call_chat(ten, ts, chat_input)
264267
except Exception as e:
265-
ten.log_error(str(e))
268+
ten.log_error(str(e), traceback.print_exc())
266269

267270
def on_cmd(self, ten: TenEnv, cmd: Cmd) -> None:
268271
ts = datetime.now()
@@ -274,7 +277,7 @@ def on_cmd(self, ten: TenEnv, cmd: Cmd) -> None:
274277
cmd_out = Cmd.create("flush")
275278
ten.send_cmd(
276279
cmd_out,
277-
lambda ten, result: ten.log_info("send_cmd flush done"),
280+
lambda ten, result, error: ten.log_info("send_cmd flush done"),
278281
)
279282
elif cmd_name == "call_chat":
280283
self.queue.put((cmd, ts))

agents/ten_packages/extension/aisuite_llm_python/manifest.json

+2-4
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,6 @@
1111
],
1212
"api": {
1313
"property": {
14-
"api_key": {
15-
"type": "string"
16-
},
1714
"model": {
1815
"type": "string"
1916
},
@@ -30,7 +27,8 @@
3027
"type": "int64"
3128
},
3229
"provider_config": {
33-
"type": "string"
30+
"type": "object",
31+
"properties":{}
3432
}
3533
},
3634
"data_in": [
agents/ten_packages/extension/aisuite_llm_python/requirements.txt (filename missing from this capture — inferred from the `aisuite` dependency lines below; confirm against the commit)

Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
1-
aisuite
1+
aisuite
2+
aisuite[all]

0 commit comments

Comments (0)