
Commit b1e74db

convert to xfail
1 parent 2ce6ee1 commit b1e74db
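
In short: the Windows + DDP guards tied to pytorch/pytorch#116056 change from `@pytest.mark.skipif` to `@pytest.mark.xfail` across the Fabric tests, and the FSDP checkpoint-consolidation test gains a version ceiling for PyTorch >= 2.2.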

6 files changed: +10 -7 lines

tests/tests_fabric/plugins/precision/test_amp_integration.py

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ def forward(self, x):
         return output


-@pytest.mark.skipif(
+@pytest.mark.xfail(
     # https://github.com/pytorch/pytorch/issues/116056
     sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2,
     reason="Windows + DDP issue in PyTorch 2.2",
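
For context: `skipif` never runs the test when the condition holds, while a conditional `xfail` still executes it and records a failure as XFAIL instead of an error; once the upstream PyTorch bug is fixed, the test surfaces as XPASS rather than staying silently skipped. A minimal sketch of the two behaviors (toy tests and a hypothetical stand-in condition, not code from this commit):

import sys

import pytest

# Hypothetical stand-in for `sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2`.
WINDOWS_DDP_BROKEN = sys.platform == "win32"


def ddp_all_reduce():
    # Placeholder for the distributed work that breaks on Windows + PyTorch 2.2.
    return 2


@pytest.mark.skipif(WINDOWS_DDP_BROKEN, reason="never executed when the condition holds")
def test_skipped():
    assert ddp_all_reduce() == 2


@pytest.mark.xfail(WINDOWS_DDP_BROKEN, reason="still executed; failure -> XFAIL, pass -> XPASS")
def test_xfailed():
    assert ddp_all_reduce() == 2

Run with `pytest -rxX` to list XFAIL/XPASS outcomes in the summary. By default `xfail` is non-strict, so an unexpected pass is reported but does not fail the suite.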

tests/tests_fabric/strategies/launchers/test_multiprocessing_integration.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ def __init__(self):
         self.register_buffer("buffer", torch.ones(3))


-@pytest.mark.skipif(
+@pytest.mark.xfail(
     # https://github.com/pytorch/pytorch/issues/116056
     sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2,
     reason="Windows + DDP issue in PyTorch 2.2",

tests/tests_fabric/strategies/test_ddp_integration.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@
 from tests_fabric.test_fabric import BoringModel


-@pytest.mark.skipif(
+@pytest.mark.xfail(
     # https://github.com/pytorch/pytorch/issues/116056
     sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2,
     reason="Windows + DDP issue in PyTorch 2.2",

tests/tests_fabric/strategies/test_fsdp_integration.py

Lines changed: 5 additions & 2 deletions
@@ -22,7 +22,9 @@
 from lightning.fabric import Fabric
 from lightning.fabric.plugins import FSDPPrecision
 from lightning.fabric.strategies import FSDPStrategy
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_0, _TORCH_GREATER_EQUAL_2_1
+from lightning.fabric.utilities.imports import (
+    _TORCH_GREATER_EQUAL_2_0, _TORCH_GREATER_EQUAL_2_1, _TORCH_GREATER_EQUAL_2_2
+)
 from lightning.fabric.utilities.load import _load_distributed_checkpoint
 from lightning.fabric.wrappers import _FabricOptimizer
 from torch.distributed.fsdp import FlatParameter, FullyShardedDataParallel, OptimStateKeyType

@@ -560,7 +562,8 @@ def test_clip_gradients(clip_type, precision):
     optimizer.zero_grad()


-@RunIf(min_cuda_gpus=2, standalone=True, min_torch="2.1.0")
+@pytest.mark.xfail(_TORCH_GREATER_EQUAL_2_2, reason="Checkpoint consolidation not supported with PyTorch >= 2.2")
+@RunIf(min_cuda_gpus=2, standalone=True, min_torch="2.1.0", max_torch="2.2.0")
 def test_save_sharded_and_consolidate_and_load(tmp_path):
     """Test the consolidation of a FSDP-sharded checkpoint into a single file."""
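
The consolidation test is now double-gated: `RunIf(..., max_torch="2.2.0")` keeps it out of PyTorch >= 2.2 runs entirely, while the conditional `xfail` documents why and covers the case where the test is still selected. A minimal sketch of the same stacking using plain pytest markers (generic version flag; `RunIf` is Lightning's own test helper, not shown here):

import pytest
import torch
from packaging.version import Version

# Hypothetical stand-in for lightning.fabric.utilities.imports._TORCH_GREATER_EQUAL_2_2.
_TORCH_GE_2_2 = Version(torch.__version__) >= Version("2.2.0")


@pytest.mark.xfail(_TORCH_GE_2_2, reason="checkpoint consolidation unsupported on torch >= 2.2")
@pytest.mark.skipif(_TORCH_GE_2_2, reason="version ceiling, analogous to max_torch='2.2.0'")
def test_save_and_consolidate(tmp_path):
    # Placeholder body; the real test shards a model, consolidates the
    # checkpoint into a single file, and loads it back.
    assert tmp_path.exists()

When both markers apply, the skip wins and the test never runs; the `xfail` then mainly serves as in-code documentation of the known limitation.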

tests/tests_fabric/utilities/test_distributed.py

Lines changed: 1 addition & 1 deletion
@@ -120,7 +120,7 @@ def test_collective_operations(devices, process):
     spawn_launch(process, devices)


-@pytest.mark.skipif(
+@pytest.mark.xfail(
     # https://github.com/pytorch/pytorch/issues/116056
     sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2,
     reason="Windows + DDP issue in PyTorch 2.2",

tests/tests_fabric/utilities/test_spike.py

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ def spike_detection_test(fabric, global_rank_spike, spike_value, should_raise):
     )


-@pytest.mark.skipif(
+@pytest.mark.xfail(
     # https://github.com/pytorch/pytorch/issues/116056
     sys.platform == "win32" and _TORCH_GREATER_EQUAL_2_2,
     reason="Windows + DDP issue in PyTorch 2.2",
