From bc47d0929eb426d634ede8e11e0375f0fa5218f9 Mon Sep 17 00:00:00 2001
From: sunshinexcode <24xinhui@163.com>
Date: Thu, 26 Dec 2024 09:50:31 +0000
Subject: [PATCH] chore(): use the ten_env logging module uniformly

---
 .../extension/llama_index_chat_engine/log.py  | 13 -----------
 .../message_collector/src/extension.py        |  6 ++---
 .../extension/minimax_v2v_python/extension.py |  2 +-
 .../extension/openai_chatgpt_python/BUILD.gn  |  1 -
 .../openai_chatgpt_python/__init__.py         |  3 ---
 .../extension/openai_chatgpt_python/log.py    | 22 -------------------
 .../extension/vision_tool_python/log.py       | 20 -----------------
 7 files changed, 4 insertions(+), 63 deletions(-)
 delete mode 100644 agents/ten_packages/extension/llama_index_chat_engine/log.py
 delete mode 100644 agents/ten_packages/extension/openai_chatgpt_python/log.py
 delete mode 100644 agents/ten_packages/extension/vision_tool_python/log.py

diff --git a/agents/ten_packages/extension/llama_index_chat_engine/log.py b/agents/ten_packages/extension/llama_index_chat_engine/log.py
deleted file mode 100644
index 0804a279..00000000
--- a/agents/ten_packages/extension/llama_index_chat_engine/log.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import logging
-
-logger = logging.getLogger("llama_index_chat_engine")
-logger.setLevel(logging.INFO)
-
-formatter = logging.Formatter(
-    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - [%(filename)s:%(lineno)d] - %(message)s"
-)
-
-console_handler = logging.StreamHandler()
-console_handler.setFormatter(formatter)
-
-logger.addHandler(console_handler)
diff --git a/agents/ten_packages/extension/message_collector/src/extension.py b/agents/ten_packages/extension/message_collector/src/extension.py
index 1bae37be..229285cd 100644
--- a/agents/ten_packages/extension/message_collector/src/extension.py
+++ b/agents/ten_packages/extension/message_collector/src/extension.py
@@ -37,7 +37,7 @@
 MAX_CHUNK_SIZE_BYTES = 1024
 
 
-def _text_to_base64_chunks(text: str, msg_id: str) -> list:
+def _text_to_base64_chunks(ten_env: TenEnv, text: str, msg_id: str) -> list:
     # Ensure msg_id does not exceed 50 characters
     if len(msg_id) > 36:
         raise ValueError("msg_id cannot exceed 36 characters.")
@@ -85,7 +85,7 @@
             estimated_chunk_size -= 100  # Reduce content size gradually
             count += 1
 
-            # logger.debug(f"chunk estimate guess: {count}")
+            # ten_env.log_debug(f"chunk estimate guess: {count}")
 
         # Add the current chunk to the list
         chunks.append(formatted_chunk)
@@ -215,7 +215,7 @@ def on_data(self, ten_env: TenEnv, data: Data) -> None:
             }
 
             try:
-                chunks = _text_to_base64_chunks(json.dumps(base_msg_data), message_id)
+                chunks = _text_to_base64_chunks(ten_env, json.dumps(base_msg_data), message_id)
                 for chunk in chunks:
                     asyncio.run_coroutine_threadsafe(self._queue_message(chunk), self.loop)
diff --git a/agents/ten_packages/extension/minimax_v2v_python/extension.py b/agents/ten_packages/extension/minimax_v2v_python/extension.py
index fb8bc38c..6a253d93 100644
--- a/agents/ten_packages/extension/minimax_v2v_python/extension.py
+++ b/agents/ten_packages/extension/minimax_v2v_python/extension.py
@@ -245,7 +245,7 @@ async def _complete_with_history(self, ts: datetime, buff: bytearray):
 
             i = 0
             async for line in response.aiter_lines():
-                # logger.info(f"-> line {line}")
+                # ten_env.log_info(f"-> line {line}")
                 # if self._need_interrupt(ts):
                 #     ten_env.log_warn(f"trace-id: {trace_id}, interrupted")
                 #     if self.transcript:
diff --git a/agents/ten_packages/extension/openai_chatgpt_python/BUILD.gn b/agents/ten_packages/extension/openai_chatgpt_python/BUILD.gn
index 23f06108..0b03f3ef 100644
--- a/agents/ten_packages/extension/openai_chatgpt_python/BUILD.gn
+++ b/agents/ten_packages/extension/openai_chatgpt_python/BUILD.gn
@@ -14,7 +14,6 @@ ten_package("openai_chatgpt_python") {
     "__init__.py",
     "addon.py",
     "extension.py",
-    "log.py",
     "manifest.json",
     "property.json",
   ]
diff --git a/agents/ten_packages/extension/openai_chatgpt_python/__init__.py b/agents/ten_packages/extension/openai_chatgpt_python/__init__.py
index 09a409ff..8cd75dde 100644
--- a/agents/ten_packages/extension/openai_chatgpt_python/__init__.py
+++ b/agents/ten_packages/extension/openai_chatgpt_python/__init__.py
@@ -6,6 +6,3 @@
 #
 #
 from . import addon
-from .log import logger
-
-logger.info("openai_chatgpt_python extension loaded")
diff --git a/agents/ten_packages/extension/openai_chatgpt_python/log.py b/agents/ten_packages/extension/openai_chatgpt_python/log.py
deleted file mode 100644
index 1813e965..00000000
--- a/agents/ten_packages/extension/openai_chatgpt_python/log.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#
-# Agora Real Time Engagement
-# Created by Wei Hu in 2024-08.
-# Copyright (c) 2024 Agora IO. All rights reserved.
-#
-#
-import logging
-
-logger = logging.getLogger("openai_chatgpt_python")
-logger.setLevel(logging.INFO)
-
-formatter_str = (
-    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
-    "[%(filename)s:%(lineno)d] - %(message)s"
-)
-formatter = logging.Formatter(formatter_str)
-
-console_handler = logging.StreamHandler()
-console_handler.setFormatter(formatter)
-
-logger.addHandler(console_handler)
diff --git a/agents/ten_packages/extension/vision_tool_python/log.py b/agents/ten_packages/extension/vision_tool_python/log.py
deleted file mode 100644
index 6e4e495f..00000000
--- a/agents/ten_packages/extension/vision_tool_python/log.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# This file is part of TEN Framework, an open source project.
-# Licensed under the Apache License, Version 2.0.
-# See the LICENSE file for more information.
-#
-import logging
-
-logger = logging.getLogger("vision_tool_python")
-logger.setLevel(logging.INFO)
-
-formatter_str = (
-    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
-    "[%(filename)s:%(lineno)d] - %(message)s"
-)
-formatter = logging.Formatter(formatter_str)
-
-console_handler = logging.StreamHandler()
-console_handler.setFormatter(formatter)
-
-logger.addHandler(console_handler)
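
The pattern this patch applies is the same in every extension: drop the per-extension logging.getLogger()/StreamHandler setup that lived in log.py and route messages through the log_* methods of the TenEnv handle the framework already passes into each callback (ten_env.log_debug, ten_env.log_info and ten_env.log_warn all appear in the hunks above). The sketch below is illustrative only, not code from the patch: the helper names, the message text, and the _FakeTenEnv stub are made up so the snippet runs on its own; the real TenEnv object comes from the TEN runtime.

# Illustrative before/after sketch of the logging migration (assumptions noted inline).

# Before: each extension configured its own stdlib logger in a local log.py.
import logging

logger = logging.getLogger("openai_chatgpt_python")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - "
    "[%(filename)s:%(lineno)d] - %(message)s"
))
logger.addHandler(handler)


def chunk_text_old(text: str) -> list:
    # Hypothetical helper: logs through the module-level logger.
    logger.debug(f"chunking {len(text)} bytes")
    return [text[i:i + 1024] for i in range(0, len(text), 1024)]


# After: helpers take the TenEnv handle and call its log_* methods, so no
# per-extension logger, formatter, or handler setup is needed.
def chunk_text_new(ten_env, text: str) -> list:
    # ten_env.log_debug is the call used in the message_collector hunk above.
    ten_env.log_debug(f"chunking {len(text)} bytes")
    return [text[i:i + 1024] for i in range(0, len(text), 1024)]


class _FakeTenEnv:
    # Minimal stand-in for the runtime TenEnv, only so this sketch is runnable.
    def log_debug(self, msg: str) -> None:
        print(f"[DEBUG] {msg}")


if __name__ == "__main__":
    chunk_text_old("hello world")
    chunk_text_new(_FakeTenEnv(), "hello world")

In practice this is why the three log.py files and the BUILD.gn/__init__.py references to them can simply be deleted: once every helper receives ten_env (as _text_to_base64_chunks now does), there is no per-extension logging state left to configure.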