Commit
Merge pull request #545 from TEN-framework/fix/fix-tools
fix: fix all send_cmds
plutoless authored Dec 31, 2024
commit 6c873a3 (parents: 92dbfcf, 7c85b51)
Showing 7 changed files with 17 additions and 16 deletions.
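
The recurring change across these files: ten_env.send_cmd now resolves to a pair that call sites unpack as [result, _] instead of binding a bare CmdResult. Below is a minimal sketch of the updated calling convention, assuming the second element is an error value that these call sites deliberately ignore; the command name, tool name, and call_example_tool helper are illustrative and not taken from the diff.

from ten import Cmd, StatusCode  # import path assumed from the surrounding extensions
from ten.async_ten_env import AsyncTenEnv


async def call_example_tool(ten_env: AsyncTenEnv) -> None:
    cmd = Cmd.create("tool_call")                     # hypothetical command name
    cmd.set_property_string("name", "example_tool")   # hypothetical tool name
    cmd.set_property_from_json("arguments", "{}")

    # Previously: result: CmdResult = await ten_env.send_cmd(cmd)
    # Now: unpack the pair and ignore the second element.
    [result, _] = await ten_env.send_cmd(cmd)
    if result.get_status_code() == StatusCode.OK:
        ten_env.log_info("tool call succeeded")
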
@@ -13,7 +13,7 @@
    Cmd,
)
from ten.async_ten_env import AsyncTenEnv
-from ten_ai_base.helper import get_properties_string
+from ten_ai_base.config import BaseConfig
from ten_ai_base import AsyncLLMToolBaseExtension
from ten_ai_base.types import LLMToolMetadata, LLMToolMetadataParameter, LLMToolResult
@@ -61,13 +61,15 @@
# 2. https://learn.microsoft.com/en-us/bing/search-apis/bing-custom-search/overview
# 3. https://azure.microsoft.com/en-in/updates/bing-search-apis-will-transition-from-azure-cognitive-services-to-azure-marketplace-on-31-october-2023/

+class BingSearchToolConfig(BaseConfig):
+    api_key: str = ""
+
class BingSearchToolExtension(AsyncLLMToolBaseExtension):

    def __init__(self, name: str) -> None:
        super().__init__(name)
-        self.api_key = None
        self.session = None
+        self.config = None
        self.k = 10

    async def on_init(self, ten_env: AsyncTenEnv) -> None:
@@ -79,10 +81,9 @@ async def on_start(self, ten_env: AsyncTenEnv) -> None:
        ten_env.log_debug("on_start")
        await super().on_start(ten_env)

-        get_properties_string(
-            ten_env, [PROPERTY_API_KEY], lambda name, value: setattr(self, name, value)
-        )
-        if not self.api_key:
+        self.config = await BingSearchToolConfig.create_async(ten_env=ten_env)
+
+        if not self.config.api_key:
            ten_env.log_info("API key is missing, exiting on_start")
            return

@@ -146,7 +147,7 @@ async def _initialize_session(self, ten_env: AsyncTenEnv):

    async def _bing_search_results(self, ten_env: AsyncTenEnv, search_term: str, count: int) -> List[dict]:
        await self._initialize_session(ten_env)
-        headers = {"Ocp-Apim-Subscription-Key": self.api_key}
+        headers = {"Ocp-Apim-Subscription-Key": self.config.api_key}
        params = {
            "q": search_term,
            "count": count,
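
The Bing search extension also drops per-property fetching via get_properties_string in favor of a typed config object. Below is a minimal sketch of that pattern, mirroring the class shape in the diff above and assuming create_async populates each declared field from the extension property of the same name; ExampleToolConfig and load_config are hypothetical names, not part of the diff.

from ten.async_ten_env import AsyncTenEnv
from ten_ai_base.config import BaseConfig


class ExampleToolConfig(BaseConfig):
    # Assumption: create_async fills each field from the extension property
    # with the same name, falling back to the default declared here.
    api_key: str = ""


async def load_config(ten_env: AsyncTenEnv) -> ExampleToolConfig:
    config = await ExampleToolConfig.create_async(ten_env=ten_env)
    if not config.api_key:
        ten_env.log_info("API key is missing")
    return config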

@@ -599,7 +599,7 @@ async def _handle_tool_call(self, func_calls: list[FunctionCall]) -> None:
            cmd: Cmd = Cmd.create(CMD_TOOL_CALL)
            cmd.set_property_string("name", name)
            cmd.set_property_from_json("arguments", json.dumps(arguments))
-            result: CmdResult = await self.ten_env.send_cmd(cmd)
+            [result, _] = await self.ten_env.send_cmd(cmd)

            func_response = FunctionResponse(
                id=tool_call_id, name=name, response={"error": "Failed to call tool"}

agents/ten_packages/extension/glue_python_async/extension.py (2 additions, 2 deletions)
@@ -166,7 +166,7 @@ async def on_start(self, ten_env: AsyncTenEnv) -> None:
        self.memory = ChatMemory(self.config.max_history)

        if self.config.enable_storage:
-            result = await ten_env.send_cmd(Cmd.create("retrieve"))
+            [result, _] = await ten_env.send_cmd(Cmd.create("retrieve"))
            if result.get_status_code() == StatusCode.OK:
                try:
                    history = json.loads(result.get_property_string("response"))
@@ -382,7 +382,7 @@ async def handle_tool_call(self, call: ToolCall) -> ToolCallResponse:
        cmd.set_property_from_json("arguments", call.function.arguments)

        # Send the command and handle the result through the future
-        result: CmdResult = await self.ten_env.send_cmd(cmd)
+        [result, _] = await self.ten_env.send_cmd(cmd)
        if result.get_status_code() == StatusCode.OK:
            tool_result: LLMToolResult = json.loads(
                result.get_property_to_json(CMD_PROPERTY_RESULT)

@@ -215,7 +215,7 @@ async def _handle_text_data(self, data: dict):
            json_bytes = json.dumps(data).encode("utf-8")
            cmd = Cmd.create("publish")
            cmd.set_property_buf("message", json_bytes)
-            cmd_result: CmdResult = await self.ten_env.send_cmd(cmd)
+            [cmd_result, _] = await self.ten_env.send_cmd(cmd)
            self.ten_env.log_info(f"send_cmd result {cmd_result.to_json()}")
        except Exception as e:
            self.ten_env.log_error(f"Failed to handle text data: {e}")
@@ -225,7 +225,7 @@ async def _handle_user_state(self, data: dict):
            json_bytes = json.dumps(data)
            cmd = Cmd.create("set_presence_state")
            cmd.set_property_string("states", json_bytes)
-            cmd_result: CmdResult = await self.ten_env.send_cmd(cmd)
+            [cmd_result, _] = await self.ten_env.send_cmd(cmd)
            self.ten_env.log_info(f"send_cmd result {cmd_result.to_json()}")
        except Exception as e:
            self.ten_env.log_error(f"Failed to handle user state: {e}")

@@ -124,7 +124,7 @@ async def on_cmd(self, ten_env: AsyncTenEnv, cmd: Cmd) -> None:
        match cmd_name:
            case "flush":
                await self._flush(ten_env=ten_env)
-                _result = await ten_env.send_cmd(Cmd.create("flush"))
+                await ten_env.send_cmd(Cmd.create("flush"))
                ten_env.log_debug("flush done")
            case _:
                pass

@@ -229,7 +229,7 @@ async def handle_tool_call(tool_call):
        # cmd.set_property_from_json("arguments", json.dumps([]))

        # Send the command and handle the result through the future
-        result: CmdResult = await async_ten_env.send_cmd(cmd)
+        [result, _] = await async_ten_env.send_cmd(cmd)
        if result.get_status_code() == StatusCode.OK:
            tool_result: LLMToolResult = json.loads(
                result.get_property_to_json(CMD_PROPERTY_RESULT)

agents/ten_packages/extension/openai_v2v_python/extension.py (2 additions, 2 deletions)
@@ -170,7 +170,7 @@ async def on_start(self, ten_env: AsyncTenEnv) -> None:
        self.memory = ChatMemory(self.config.max_history)

        if self.config.enable_storage:
-            result = await ten_env.send_cmd(Cmd.create("retrieve"))
+            [result, _] = await ten_env.send_cmd(Cmd.create("retrieve"))
            if result.get_status_code() == StatusCode.OK:
                try:
                    history = json.loads(result.get_property_string("response"))
@@ -687,7 +687,7 @@ async def _handle_tool_call(
        cmd: Cmd = Cmd.create(CMD_TOOL_CALL)
        cmd.set_property_string("name", name)
        cmd.set_property_from_json("arguments", arguments)
-        result: CmdResult = await self.ten_env.send_cmd(cmd)
+        [result, _] = await self.ten_env.send_cmd(cmd)

        tool_response = ItemCreate(
            item=FunctionCallOutputItemParam(
