diff --git a/dinov2/layers/attention.py b/dinov2/layers/attention.py
index 0fb76ef28..9e9f8e9e1 100644
--- a/dinov2/layers/attention.py
+++ b/dinov2/layers/attention.py
@@ -24,7 +24,6 @@
         from xformers.ops import memory_efficient_attention, unbind
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (Attention)")
     else:
         warnings.warn("xFormers is disabled (Attention)")
         raise ImportError
diff --git a/dinov2/layers/block.py b/dinov2/layers/block.py
index 930787b26..06bdbc0f9 100644
--- a/dinov2/layers/block.py
+++ b/dinov2/layers/block.py
@@ -30,7 +30,6 @@
         from xformers.ops import fmha, scaled_index_add, index_select_cat
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (Block)")
     else:
         warnings.warn("xFormers is disabled (Block)")
         raise ImportError
diff --git a/dinov2/layers/swiglu_ffn.py b/dinov2/layers/swiglu_ffn.py
index 5e9dafa45..dd57a4493 100644
--- a/dinov2/layers/swiglu_ffn.py
+++ b/dinov2/layers/swiglu_ffn.py
@@ -40,7 +40,6 @@ def forward(self, x: Tensor) -> Tensor:
         from xformers.ops import SwiGLU
 
         XFORMERS_AVAILABLE = True
-        warnings.warn("xFormers is available (SwiGLU)")
     else:
         warnings.warn("xFormers is disabled (SwiGLU)")
         raise ImportError