diff --git a/dinov2/layers/attention.py b/dinov2/layers/attention.py
index f1d3dabf1..baba9ec4c 100644
--- a/dinov2/layers/attention.py
+++ b/dinov2/layers/attention.py
@@ -24,7 +24,6 @@
         from xformers.ops import memory_efficient_attention, unbind
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (Attention)")
     else:
         warnings.warn("xFormers is disabled (Attention)")
         raise ImportError
diff --git a/dinov2/layers/block.py b/dinov2/layers/block.py
index 7e83b71cc..d1a0fbb8d 100644
--- a/dinov2/layers/block.py
+++ b/dinov2/layers/block.py
@@ -30,7 +30,6 @@
         from xformers.ops import fmha, scaled_index_add, index_select_cat
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (Block)")
     else:
         warnings.warn("xFormers is disabled (Block)")
         raise ImportError
diff --git a/dinov2/layers/swiglu_ffn.py b/dinov2/layers/swiglu_ffn.py
index 340cee356..56c29df48 100644
--- a/dinov2/layers/swiglu_ffn.py
+++ b/dinov2/layers/swiglu_ffn.py
@@ -40,7 +40,6 @@ def forward(self, x: Tensor) -> Tensor:
         from xformers.ops import SwiGLU
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (SwiGLU)")
     else:
         warnings.warn("xFormers is disabled (SwiGLU)")
         raise ImportError
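
The three hunks apply the same change to each module's guarded xFormers import: the "xFormers is available" warning that fired on every successful import is removed, so only the disabled and missing cases still warn. Below is a minimal sketch of how the pattern reads after the patch, using the imports from attention.py. The enclosing try/except and the XFORMERS_ENABLED environment check are assumptions filled in from context; the diff only shows the hunk interiors.

```python
import os
import warnings

# Assumption: xFormers use is gated on an environment variable; only the
# bodies of the `if`/`else` below are visible in the diff context.
XFORMERS_ENABLED = os.environ.get("XFORMERS_DISABLED") is None
try:
    if XFORMERS_ENABLED:
        from xformers.ops import memory_efficient_attention, unbind  # noqa: F401

        XFORMERS_AVAILABLE = True
        # Patch: the warnings.warn("xFormers is available (Attention)") call
        # that used to sit here is deleted, so a successful import is silent.
    else:
        warnings.warn("xFormers is disabled (Attention)")
        raise ImportError
except ImportError:
    XFORMERS_AVAILABLE = False
    warnings.warn("xFormers is not available (Attention)")
```

Net effect: importing dinov2.layers no longer emits a warning per module on the happy path, while the disabled and import-failure branches keep their diagnostics.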