From b44a207248cf48449076732b59f28c0480b21b05 Mon Sep 17 00:00:00 2001
From: Dan Saunders
Date: Thu, 6 Mar 2025 17:44:32 +0000
Subject: [PATCH] update

---
 tests/e2e/multigpu/test_sequence_parallelism.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/tests/e2e/multigpu/test_sequence_parallelism.py b/tests/e2e/multigpu/test_sequence_parallelism.py
index 9619cf690..c57c76caf 100644
--- a/tests/e2e/multigpu/test_sequence_parallelism.py
+++ b/tests/e2e/multigpu/test_sequence_parallelism.py
@@ -73,12 +73,8 @@ def test_integration_with_config():
 
 def test_ring_attn_group_creation():
     """Test that ring attention groups are properly created in a multi-GPU environment."""
-    # First ensure we're in a distributed environment
     if not torch.distributed.is_initialized():
-        # Skip this test if not in distributed mode
-        pytest.skip(
-            "This test requires a properly initialized torch.distributed environment"
-        )
+        torch.distributed.init_process_group("nccl")
 
     from axolotl.monkeypatch.attention.ring_attn import (
         get_ring_attn_group,