Open
Description
After testing, I found that async aiologger is much slower than sync logging.
I also tested with the Sanic web framework, and aiologger was slower there as well.
If you know why this happens, please tell me the reason.
All tests were run on my MacBook Pro (M1).
async
# coding: utf8
import asyncio
import logging
from aiologger import Logger
from aiologger.formatters.base import Formatter
from aiologger.handlers.files import AsyncTimedRotatingFileHandler, BaseAsyncRotatingFileHandler, AsyncFileHandler
from pathlib import Path
import sys
from aiologger.handlers.streams import AsyncStreamHandler
import time
BASE_DIR = Path().cwd()
def get_logger(name: str = "log") -> Logger:
    """Build an aiologger Logger writing to a time-rotated file and to a stream."""
    async_logger: Logger = Logger(name=name, level=logging.DEBUG)

    log_path = BASE_DIR / "logs" / f"{name}.log"
    log_path.parent.mkdir(parents=True, exist_ok=True)
    log_path.touch(exist_ok=True)

    line_format = Formatter('%(asctime)s %(filename)30s[line:%(lineno)3d] [%(name)s] %(levelname)s %(message)s')

    # File output: rotated on a timed schedule, keeping 30 backups.
    file_handler = AsyncTimedRotatingFileHandler(
        filename=str(log_path),
        interval=1,
        backup_count=30,
        encoding="utf8"
    )
    file_handler.formatter = line_format
    async_logger.add_handler(file_handler)

    # Console output through an async stream handler.
    console_handler = AsyncStreamHandler()
    console_handler.formatter = line_format
    async_logger.add_handler(console_handler)

    return async_logger
logger = get_logger("b")


async def main() -> None:
    """Emit 10000 records through the async logger, then flush and close it."""
    for _ in range(10000):
        await logger.info("word")
    # shutdown() flushes pending writes and closes every handler.
    await logger.shutdown()
if __name__ == '__main__':
    # time.perf_counter() is the monotonic clock intended for measuring
    # elapsed time; time.time() can jump if the system clock is adjusted.
    start = time.perf_counter()
    asyncio.run(main())
    runtime = time.perf_counter() - start
    print(f"runtime: {runtime}")  # ~2.16 seconds observed on an M1 MacBook Pro
sync
# encoding: utf-8
import logging.handlers
import os
import threading
# Cache of already-configured loggers, keyed by logger name. Prevents
# duplicate handlers from being attached when the same name is requested twice.
CACHE = {}
# Guards CACHE and handler setup against concurrent get_logger() calls.
_lock = threading.Lock()


def get_logger(*args, **kwargs):
    """Thread-safe front door for _get_logger(); see it for parameters."""
    with _lock:
        return _get_logger(*args, **kwargs)


def _get_logger(logger_name, rotate_type="file", info_only=False, backup_count=5, folder="logs"):
    """Build (or fetch from cache) a logger writing to a rotating file and the console.

    Args:
        logger_name: name passed to logging.getLogger(); also used in the file name.
        rotate_type: "time" for daily rotation, anything else for 100 MB size rotation.
        info_only: NOTE(review): naming looks inverted — True yields "<name>.log",
            False yields "<name>_info.log". Preserved as-is for compatibility; confirm intent.
        backup_count: number of rotated backup files to keep.
        folder: directory (relative to this file) that receives the log files.

    Returns:
        A logging.Logger at DEBUG level with a rotating file handler (INFO and
        above) and a console StreamHandler (DEBUG and above) attached.
    """
    if logger_name in CACHE:
        return CACHE[logger_name]

    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)

    base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), folder)
    # makedirs(exist_ok=True) creates missing parents and is race-free,
    # unlike the previous exists()-then-mkdir() pair.
    os.makedirs(base_dir, exist_ok=True)

    if info_only:
        info_file_name = "{}.log".format(logger_name)
    else:
        info_file_name = "{}_info.log".format(logger_name)
    log_path = os.path.join(base_dir, info_file_name)

    if rotate_type == "time":
        file_handler = logging.handlers.TimedRotatingFileHandler(
            log_path, 'D', 1, backupCount=backup_count)
    else:
        file_handler = logging.handlers.RotatingFileHandler(
            log_path, mode='a', maxBytes=100 * 1024 * 1024,
            backupCount=backup_count)
    file_handler.setLevel(logging.INFO)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)

    formatter = logging.Formatter('%(asctime)-15s %(processName)s %(threadName)-10s %(filename)30s[line:%(lineno)3d] [%(name)s] %(levelname)s %(message)s')
    file_handler.setFormatter(formatter)
    console_handler.setFormatter(formatter)

    logger.addHandler(file_handler)
    logger.addHandler(console_handler)

    CACHE[logger_name] = logger
    return logger
logger = get_logger("a", info_only=True)


def main() -> None:
    """Log the same message 10000 times to benchmark the sync logger."""
    for _ in range(10000):
        logger.info("word")
if __name__ == '__main__':
    # Bug fix: `time` was never imported in this script, so the benchmark
    # crashed with NameError before logging anything.
    import time

    # perf_counter() is the monotonic clock intended for elapsed-time measurement.
    start = time.perf_counter()
    main()
    runtime = time.perf_counter() - start
    print(f"runtime: {runtime}")  # ~0.45 seconds observed on an M1 MacBook Pro
Metadata
Metadata
Assignees
Labels
No labels