From 10c389f85cd89930947ce3aca98928b558462ab5 Mon Sep 17 00:00:00 2001 From: Alex Cunha Date: Tue, 7 Nov 2023 16:16:18 +0000 Subject: [PATCH 1/4] compatible with lightning 2.2 --- src/lightning_graphcore/strategy.py | 2 +- src/lightning_graphcore/utils.py | 4 +--- tests/test_accelerator.py | 26 +++----------------------- 3 files changed, 5 insertions(+), 27 deletions(-) diff --git a/src/lightning_graphcore/strategy.py b/src/lightning_graphcore/strategy.py index f28dd03..9af6814 100644 --- a/src/lightning_graphcore/strategy.py +++ b/src/lightning_graphcore/strategy.py @@ -229,7 +229,7 @@ def _convert_to_poptorch_loader( return dataloader dl_args, dl_kwargs = _get_dataloader_init_args_and_kwargs( - dataloader, sampler, mode, self.replication_factor > 1 + dataloader, sampler, mode ) opts = self.training_opts if mode == RunningStage.TRAINING else self.inference_opts return _reinstantiate_wrapped_cls(dataloader, opts, *dl_args, explicit_cls=poptorch.DataLoader, **dl_kwargs) diff --git a/src/lightning_graphcore/utils.py b/src/lightning_graphcore/utils.py index 9d56a15..d789ffd 100644 --- a/src/lightning_graphcore/utils.py +++ b/src/lightning_graphcore/utils.py @@ -20,15 +20,13 @@ if package_available("lightning"): from lightning.fabric.utilities.device_dtype_mixin import _DeviceDtypeModuleMixin from lightning.pytorch import LightningModule - from lightning.pytorch.overrides.base import _LightningPrecisionModuleWrapperBase elif package_available("pytorch_lightning"): from lightning_fabric.utilities.device_dtype_mixin import _DeviceDtypeModuleMixin from pytorch_lightning import LightningModule - from pytorch_lightning.overrides.base import _LightningPrecisionModuleWrapperBase class _LightningModuleWrapperBase(_DeviceDtypeModuleMixin, torch.nn.Module): - def __init__(self, forward_module: Union[LightningModule, _LightningPrecisionModuleWrapperBase]) -> None: + def __init__(self, forward_module: LightningModule) -> None: """Wrap the user's LightningModule and redirect the forward call to the appropriate `*_step()` methods. Inheriting classes may also modify the inputs or outputs of forward. diff --git a/tests/test_accelerator.py b/tests/test_accelerator.py index 549c69a..9705a71 100644 --- a/tests/test_accelerator.py +++ b/tests/test_accelerator.py @@ -54,7 +54,6 @@ def test_fail_if_no_ipus(_, tmpdir): # noqa: PT019 Trainer(default_root_dir=tmpdir, accelerator=IPUAccelerator(), devices=1) -@pytest.mark.xfail() # todo def test_accelerator_selected(tmpdir): assert IPUAccelerator.is_available() trainer = Trainer(default_root_dir=tmpdir, accelerator="ipu", devices=1) @@ -62,7 +61,7 @@ def test_accelerator_selected(tmpdir): def test_warning_if_ipus_not_used(): - with pytest.warns(UserWarning, match="IPU available but not used. Set `accelerator` and `devices`"): + with pytest.warns(UserWarning): Trainer(accelerator="cpu") @@ -72,10 +71,7 @@ def test_no_warning_strategy(tmpdir): assert len(record) == 0 -@pytest.mark.parametrize( - "devices", - [1, 4], -) +@pytest.mark.parametrize("devices",[1, 4]) def test_all_stages(tmpdir, devices): model = IPUModel() trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, strategy=IPUStrategy(), devices=devices) @@ -85,18 +81,7 @@ def test_all_stages(tmpdir, devices): trainer.predict(model) -@pytest.mark.parametrize( - "devices", - [ - 1, - pytest.param( - 4, - marks=pytest.mark.xfail( # fixme - AssertionError, reason="Invalid batch dimension: In the input torch.Size([1, 32]), ..." 
- ), - ), - ], -) +@pytest.mark.parametrize("devices",[1, 4]) def test_inference_only(tmpdir, devices): model = IPUModel() @@ -344,7 +329,6 @@ def test_clip_gradients_fails(tmpdir): trainer.fit(model) -@pytest.mark.xfail(RuntimeError, reason="element 0 of tensors does not require grad and does not have ...") # todo def test_autoreport(tmpdir): """Ensure autoreport dumps to a file.""" model = IPUModel() @@ -361,7 +345,6 @@ def test_autoreport(tmpdir): assert os.path.isfile(autoreport_path + "training/profile.pop") -@pytest.mark.xfail(RuntimeError, reason="element 0 of tensors does not require grad and does not have ...") # todo def test_manual_poptorch_dataloader(tmpdir): model_options = poptorch.Options() @@ -393,7 +376,6 @@ def train_dataloader(self): assert dataloader.drop_last # was kept -@pytest.mark.xfail(RuntimeError, reason="element 0 of tensors does not require grad and does not have ...") # todo def test_manual_poptorch_opts(tmpdir): """Ensure if the user passes manual poptorch Options, we run with the correct object.""" model = IPUModel() @@ -576,7 +558,6 @@ def test_accelerator_ipu_with_devices(): assert trainer.num_devices == 8 -@pytest.mark.xfail(AssertionError, reason="not implemented on PL side") def test_accelerator_auto_with_devices_ipu(): trainer = Trainer(accelerator="auto", devices=8) assert isinstance(trainer.accelerator, IPUAccelerator) @@ -621,7 +602,6 @@ def test_poptorch_models_at_different_stages(tmpdir): assert list(trainer.strategy.poptorch_models) == [stage] -@pytest.mark.xfail(AssertionError, reason="not implemented on PL side") def test_devices_auto_choice_ipu(): trainer = Trainer(accelerator="auto", devices="auto") assert trainer.num_devices == 4 From c3cfdabe42581a1b6cd5a5c4459d07b0f2fb0609 Mon Sep 17 00:00:00 2001 From: Alex Cunha Date: Tue, 7 Nov 2023 16:44:30 +0000 Subject: [PATCH 2/4] update lightning version --- requirements/lightning.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/lightning.txt b/requirements/lightning.txt index e79e628..e8f1dda 100644 --- a/requirements/lightning.txt +++ b/requirements/lightning.txt @@ -1,4 +1,4 @@ # this sets the requirements contains if you go with main lightning -# in 2.0.7 we have removed lightning.pytorch.overrides.base._LightningPrecisionModuleWrapperBase -lightning >=2.0.0, <=2.0.6 + +lightning >=2.1.0 From 0a3606ed4aa2145103724514e375982d50cd04d3 Mon Sep 17 00:00:00 2001 From: Alex Cunha Date: Tue, 5 Dec 2023 11:36:18 +0000 Subject: [PATCH 3/4] fix requirements --- requirements/lightning.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements/lightning.txt b/requirements/lightning.txt index 192d8fb..2f6a61e 100644 --- a/requirements/lightning.txt +++ b/requirements/lightning.txt @@ -1,4 +1,3 @@ # this sets the requirements contains if you go with main lightning -# in 2.0.7 we have removed lightning.pytorch.overrides.base._LightningPrecisionModuleWrapperBase -lightning >=2.0.0, <=2.2.0 +lightning >=2.0.0, <2.2.0 From be2fcc097d521a4f077393358b4c1500b83dbe21 Mon Sep 17 00:00:00 2001 From: Alex Cunha Date: Mon, 29 Jan 2024 09:42:01 +0000 Subject: [PATCH 4/4] lint --- src/lightning_graphcore/strategy.py | 4 +--- tests/test_accelerator.py | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/lightning_graphcore/strategy.py b/src/lightning_graphcore/strategy.py index 9af6814..6a701d3 100644 --- a/src/lightning_graphcore/strategy.py +++ b/src/lightning_graphcore/strategy.py @@ -228,9 +228,7 @@ def _convert_to_poptorch_loader( # 
the user is returning the `poptorch.DataLoader` directly, don't change anything. return dataloader - dl_args, dl_kwargs = _get_dataloader_init_args_and_kwargs( - dataloader, sampler, mode - ) + dl_args, dl_kwargs = _get_dataloader_init_args_and_kwargs(dataloader, sampler, mode) opts = self.training_opts if mode == RunningStage.TRAINING else self.inference_opts return _reinstantiate_wrapped_cls(dataloader, opts, *dl_args, explicit_cls=poptorch.DataLoader, **dl_kwargs) diff --git a/tests/test_accelerator.py b/tests/test_accelerator.py index b4de1c6..1998664 100644 --- a/tests/test_accelerator.py +++ b/tests/test_accelerator.py @@ -81,7 +81,7 @@ def test_all_stages(tmpdir, devices): trainer.predict(model) -@pytest.mark.parametrize("devices",[1, 4]) +@pytest.mark.parametrize("devices", [1, 4]) def test_inference_only(tmpdir, devices): model = IPUModel()
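
Background for reviewers: the driving change in PATCH 1/4 is that newer Lightning releases removed `lightning.pytorch.overrides.base._LightningPrecisionModuleWrapperBase`, so the wrapper in `utils.py` now takes the `LightningModule` directly instead of a `Union` with the precision wrapper. Below is a minimal sketch of the resulting pattern; `ToyModel`, the `_ForwardRedirect` name, and the plain `torch.nn.Module` base are illustrative simplifications (the real wrapper also mixes in `_DeviceDtypeModuleMixin` and redirects to the other `*_step()` methods as well), not the plugin's actual implementation.

```python
import torch
from lightning.pytorch import LightningModule


class _ForwardRedirect(torch.nn.Module):
    """Simplified stand-in for ``_LightningModuleWrapperBase`` after the patch:
    it holds the LightningModule directly and redirects ``forward`` to
    ``training_step`` while the module is in training mode."""

    def __init__(self, forward_module: LightningModule) -> None:
        # Post-patch signature: a plain LightningModule, no precision-wrapper union.
        super().__init__()
        self.module = forward_module

    def forward(self, *args, **kwargs):
        # Dispatch to training_step during training; fall back to the
        # module's own forward otherwise.
        if self.module.training:
            return self.module.training_step(*args, **kwargs)
        return self.module(*args, **kwargs)


class ToyModel(LightningModule):
    def __init__(self) -> None:
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.layer(x)

    def training_step(self, batch: torch.Tensor, batch_idx: int) -> torch.Tensor:
        return self(batch).sum()


wrapped = _ForwardRedirect(ToyModel())
wrapped.train()
loss = wrapped(torch.randn(4, 32), 0)  # routed through training_step
```

The same upstream release is why the requirements pin moves around across PATCH 2/4 and 3/4 before settling on `lightning >=2.0.0, <2.2.0`, and presumably why the `_get_dataloader_init_args_and_kwargs` call in `strategy.py` drops its fourth argument (`self.replication_factor > 1`) to match the helper's narrower signature in the supported Lightning versions.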