diff --git a/src/anomalib/cli/pipelines.py b/src/anomalib/cli/pipelines.py index 8cfb04fd2e..ba6030491b 100644 --- a/src/anomalib/cli/pipelines.py +++ b/src/anomalib/cli/pipelines.py @@ -6,13 +6,13 @@ import logging from jsonargparse import Namespace -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from anomalib.cli.utils.help_formatter import get_short_docstring logger = logging.getLogger(__name__) -if package_available("anomalib.pipelines"): +if module_available("anomalib.pipelines"): from anomalib.pipelines import Benchmark from anomalib.pipelines.components.base import Pipeline diff --git a/src/anomalib/cli/utils/openvino.py b/src/anomalib/cli/utils/openvino.py index ee54bf09b2..50a894c304 100644 --- a/src/anomalib/cli/utils/openvino.py +++ b/src/anomalib/cli/utils/openvino.py @@ -6,12 +6,12 @@ import logging from jsonargparse import ArgumentParser -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available logger = logging.getLogger(__name__) -if package_available("openvino"): +if module_available("openvino"): from openvino.tools.ovc.cli_parser import get_common_cli_parser else: get_common_cli_parser = None diff --git a/src/anomalib/deploy/inferencers/openvino_inferencer.py b/src/anomalib/deploy/inferencers/openvino_inferencer.py index 8dea77b92e..b85df0536c 100644 --- a/src/anomalib/deploy/inferencers/openvino_inferencer.py +++ b/src/anomalib/deploy/inferencers/openvino_inferencer.py @@ -9,7 +9,7 @@ import cv2 import numpy as np -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from omegaconf import DictConfig from PIL import Image @@ -94,7 +94,7 @@ def __init__( task: str | None = None, config: dict | None = None, ) -> None: - if not package_available("openvino"): + if not module_available("openvino"): msg = "OpenVINO is not installed. 
Please install OpenVINO to use OpenVINOInferencer." raise ImportError(msg) diff --git a/src/anomalib/loggers/wandb.py b/src/anomalib/loggers/wandb.py index 55e65e6d54..ff41a0949e 100644 --- a/src/anomalib/loggers/wandb.py +++ b/src/anomalib/loggers/wandb.py @@ -9,12 +9,12 @@ from lightning.fabric.utilities.types import _PATH from lightning.pytorch.loggers.wandb import WandbLogger from lightning.pytorch.utilities import rank_zero_only -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from matplotlib.figure import Figure from .base import ImageLoggerBase -if package_available("wandb"): +if module_available("wandb"): import wandb if TYPE_CHECKING: diff --git a/src/anomalib/models/components/base/export_mixin.py b/src/anomalib/models/components/base/export_mixin.py index d11b50ff99..327cb87e02 100644 --- a/src/anomalib/models/components/base/export_mixin.py +++ b/src/anomalib/models/components/base/export_mixin.py @@ -12,7 +12,7 @@ import numpy as np import torch -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from torch import nn from torchmetrics import Metric from torchvision.transforms.v2 import Transform @@ -245,7 +245,7 @@ def to_openvino( ... task="segmentation", ... ) """ - if not package_available("openvino"): + if not module_available("openvino"): logger.exception("Could not find OpenVINO. Please check OpenVINO installation.") raise ModuleNotFoundError @@ -294,7 +294,7 @@ def _compress_ov_model( Returns: model (CompiledModel): Model in the OpenVINO format compressed with NNCF quantization. """ - if not package_available("nncf"): + if not module_available("nncf"): logger.exception("Could not find NCCF. 
Please check NNCF installation.") raise ModuleNotFoundError diff --git a/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py b/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py index 741288354f..53648e688a 100644 --- a/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py +++ b/src/anomalib/models/image/vlm_ad/backends/chat_gpt.py @@ -10,13 +10,13 @@ from typing import TYPE_CHECKING from dotenv import load_dotenv -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from anomalib.models.image.vlm_ad.utils import Prompt from .base import Backend -if package_available("openai"): +if module_available("openai"): from openai import OpenAI else: OpenAI = None diff --git a/src/anomalib/models/image/vlm_ad/backends/huggingface.py b/src/anomalib/models/image/vlm_ad/backends/huggingface.py index e25e9dccb3..e8d3c1e84b 100644 --- a/src/anomalib/models/image/vlm_ad/backends/huggingface.py +++ b/src/anomalib/models/image/vlm_ad/backends/huggingface.py @@ -7,7 +7,7 @@ from pathlib import Path from typing import TYPE_CHECKING -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports import module_available from PIL import Image from anomalib.models.image.vlm_ad.utils import Prompt @@ -18,7 +18,7 @@ from transformers.modeling_utils import PreTrainedModel from transformers.processing_utils import ProcessorMixin -if package_available("transformers"): +if module_available("transformers"): import transformers else: transformers = None diff --git a/src/anomalib/models/image/vlm_ad/backends/ollama.py b/src/anomalib/models/image/vlm_ad/backends/ollama.py index db5a215bb3..ff680bee3b 100644 --- a/src/anomalib/models/image/vlm_ad/backends/ollama.py +++ b/src/anomalib/models/image/vlm_ad/backends/ollama.py @@ -12,13 +12,13 @@ import logging from pathlib import Path -from lightning_utilities.core.imports import package_available +from lightning_utilities.core.imports 
import module_available from anomalib.models.image.vlm_ad.utils import Prompt from .base import Backend -if package_available("ollama"): +if module_available("ollama"): from ollama import chat from ollama._client import _encode_image else: diff --git a/src/anomalib/pipelines/benchmark/job.py b/src/anomalib/pipelines/benchmark/job.py index f56899ac5d..840bda2dd1 100644 --- a/src/anomalib/pipelines/benchmark/job.py +++ b/src/anomalib/pipelines/benchmark/job.py @@ -23,6 +23,41 @@ logger = logging.getLogger(__name__) +# Import external loggers +AVAILABLE_LOGGERS: dict[str, Any] = {} + +try: + from anomalib.loggers import AnomalibCometLogger + + AVAILABLE_LOGGERS["comet"] = AnomalibCometLogger +except ImportError: + logger.debug("Comet logger not available. Install using `pip install comet-ml`") +try: + from anomalib.loggers import AnomalibMLFlowLogger + + AVAILABLE_LOGGERS["mlflow"] = AnomalibMLFlowLogger +except ImportError: + logger.debug("MLflow logger not available. Install using `pip install mlflow`") +try: + from anomalib.loggers import AnomalibTensorBoardLogger + + AVAILABLE_LOGGERS["tensorboard"] = AnomalibTensorBoardLogger +except ImportError: + logger.debug("TensorBoard logger not available. Install using `pip install tensorboard`") +try: + from anomalib.loggers import AnomalibWandbLogger + + AVAILABLE_LOGGERS["wandb"] = AnomalibWandbLogger +except ImportError: + logger.debug("Weights & Biases logger not available. Install using `pip install wandb`") + +LOGGERS_AVAILABLE = len(AVAILABLE_LOGGERS) > 0 + +if LOGGERS_AVAILABLE: + logger.info(f"Available loggers: {', '.join(AVAILABLE_LOGGERS.keys())}") +else: + logger.warning("No external loggers available. Install required packages using `anomalib install -v`") + class BenchmarkJob(Job): """Benchmarking job. 
@@ -69,6 +104,7 @@ def run( accelerator=self.accelerator, devices=devices, default_root_dir=temp_dir, + logger=self._initialize_loggers(self.flat_cfg or {}) if LOGGERS_AVAILABLE else [], ) fit_start_time = time.time() engine.fit(self.model, self.datamodule) @@ -89,8 +125,44 @@ def run( **test_results[0], } logger.info(f"Completed with result {output}") + # Logging metrics to External Loggers (excluding TensorBoard) + trainer = engine.trainer + for logger_instance in trainer.loggers: + if any( + name in AVAILABLE_LOGGERS and isinstance(logger_instance, AVAILABLE_LOGGERS[name]) + for name in ["comet", "wandb", "mlflow"] + ): + logger_instance.log_metrics(test_results[0]) + logger.debug(f"Successfully logged metrics to {logger_instance.__class__.__name__}") return output + @staticmethod + def _initialize_loggers(logger_configs: dict[str, dict[str, Any]]) -> list[Any]: + """Initialize configured external loggers. + + Args: + logger_configs: Dictionary mapping logger names to their configurations. + + Returns: + List of initialized loggers. 
+ """ + active_loggers = [] + default_configs = { + "tensorboard": {"save_dir": "logs/benchmarks"}, + "comet": {"project_name": "anomalib"}, + "wandb": {"project": "anomalib"}, + "mlflow": {"experiment_name": "anomalib"}, + } + + for logger_name, logger_class in AVAILABLE_LOGGERS.items(): + # Use provided config or fall back to defaults + config = logger_configs.get(logger_name, default_configs.get(logger_name, {})) + logger_instance = logger_class(**config) + active_loggers.append(logger_instance) + logger.info(f"Successfully initialized {logger_name} logger") + + return active_loggers + @staticmethod def collect(results: list[dict[str, Any]]) -> pd.DataFrame: """Gather the results returned from run.""" diff --git a/src/anomalib/utils/exceptions/imports.py b/src/anomalib/utils/exceptions/imports.py index dac22ba056..6ef8dbd89d 100644 --- a/src/anomalib/utils/exceptions/imports.py +++ b/src/anomalib/utils/exceptions/imports.py @@ -22,7 +22,7 @@ def try_import(import_path: str) -> bool: warnings.warn( "The 'try_import' function is deprecated and will be removed in v2.0.0. " - "Use 'package_available' from lightning-utilities instead.", + "Use 'module_available' from lightning-utilities instead.", DeprecationWarning, stacklevel=2, )