From dba5ba4fe8b1bf26abb241897a7b1f9737a198ce Mon Sep 17 00:00:00 2001
From: Stefan Doerr
Date: Tue, 3 Dec 2024 11:38:18 +0200
Subject: [PATCH 1/4] fixed pytorch deprecation warnings

---
 torchmdnet/extensions/__init__.py | 6 +++---
 torchmdnet/models/model.py        | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/torchmdnet/extensions/__init__.py b/torchmdnet/extensions/__init__.py
index b8488cd9..8be37ff5 100644
--- a/torchmdnet/extensions/__init__.py
+++ b/torchmdnet/extensions/__init__.py
@@ -145,12 +145,12 @@ def get_neighbor_pairs_fwd_meta(
 
 
 if torch.__version__ >= "2.2.0":
-    from torch.library import impl_abstract
+    from torch.library import register_fake
 
-    impl_abstract(
+    register_fake(
         "torchmdnet_extensions::get_neighbor_pairs_bkwd", get_neighbor_pairs_bkwd_meta
     )
-    impl_abstract(
+    register_fake(
         "torchmdnet_extensions::get_neighbor_pairs_fwd", get_neighbor_pairs_fwd_meta
     )
 elif torch.__version__ < "2.2.0" and torch.__version__ >= "2.0.0":
diff --git a/torchmdnet/models/model.py b/torchmdnet/models/model.py
index efc3c0d6..a9d7bc7e 100644
--- a/torchmdnet/models/model.py
+++ b/torchmdnet/models/model.py
@@ -209,7 +209,7 @@ def load_model(filepath, args=None, device="cpu", return_std=False, **kwargs):
             filepath, args=args, device=device, return_std=return_std, **kwargs
         )
     assert isinstance(filepath, str)
-    ckpt = torch.load(filepath, map_location="cpu")
+    ckpt = torch.load(filepath, map_location="cpu", weights_only=True)
     if args is None:
         args = ckpt["hyper_parameters"]

From f5d67d6ac7f03ec559edb36ea10f17ce6d0f5bbc Mon Sep 17 00:00:00 2001
From: Stefan Doerr
Date: Tue, 3 Dec 2024 11:49:21 +0200
Subject: [PATCH 2/4] Unsafe loading

---
 torchmdnet/models/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torchmdnet/models/model.py b/torchmdnet/models/model.py
index a9d7bc7e..b7c8398c 100644
--- a/torchmdnet/models/model.py
+++ b/torchmdnet/models/model.py
@@ -209,7 +209,7 @@ def load_model(filepath, args=None, device="cpu", return_std=False, **kwargs):
             filepath, args=args, device=device, return_std=return_std, **kwargs
         )
     assert isinstance(filepath, str)
-    ckpt = torch.load(filepath, map_location="cpu", weights_only=True)
+    ckpt = torch.load(filepath, map_location="cpu", weights_only=False)
     if args is None:
         args = ckpt["hyper_parameters"]

From f0a48270370950da48403dabc3b51d4a009d6de2 Mon Sep 17 00:00:00 2001
From: Stefan Doerr
Date: Tue, 3 Dec 2024 12:52:00 +0200
Subject: [PATCH 3/4] frobenius_norm deprecation warning

---
 torchmdnet/extensions/neighbors/neighbors_cpu.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/torchmdnet/extensions/neighbors/neighbors_cpu.cpp b/torchmdnet/extensions/neighbors/neighbors_cpu.cpp
index b985831a..13d36084 100644
--- a/torchmdnet/extensions/neighbors/neighbors_cpu.cpp
+++ b/torchmdnet/extensions/neighbors/neighbors_cpu.cpp
@@ -8,7 +8,7 @@
 using std::tuple;
 using torch::arange;
 using torch::div;
-using torch::frobenius_norm;
+using torch::linalg_vector_norm;
 using torch::full;
 using torch::hstack;
 using torch::index_select;
@@ -99,7 +99,7 @@ forward_impl(const std::string& strategy, const Tensor& positions, const Tensor&
             deltas.index_put_({Slice(), 0}, deltas.index({Slice(), 0}) -
                                                 scale1 * box_vectors.index({pair_batch, 0, 0}));
         }
-        distances = frobenius_norm(deltas, 1);
+        distances = linalg_vector_norm(deltas, 2, 1);
         mask = (distances < cutoff_upper) * (distances >= cutoff_lower);
         neighbors = neighbors.index({Slice(), mask});
         deltas = deltas.index({mask, Slice()});

From 535c5aecf9f0fcf59fc654ccc5d9ee6449d6adeb Mon Sep 17 00:00:00 2001
From: Stefan Doerr
Date: Tue, 3 Dec 2024 12:52:21 +0200
Subject: [PATCH 4/4] skip optimize test if nnpops is not installed

---
 tests/test_optimize.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/tests/test_optimize.py b/tests/test_optimize.py
index 3645bfa5..90ac283f 100644
--- a/tests/test_optimize.py
+++ b/tests/test_optimize.py
@@ -4,14 +4,23 @@
 import pytest
 from pytest import mark
-import torch as pt
-from torchmdnet.models.model import create_model
-from torchmdnet.optimize import optimize
-from torchmdnet.models.utils import dtype_mapping
+try:
+    import NNPOps
+
+    nnpops_available = True
+except ImportError:
+    nnpops_available = False
+
+
+@pytest.mark.skipif(not nnpops_available, reason="NNPOps not available")
 @mark.parametrize("device", ["cpu", "cuda"])
 @mark.parametrize("num_atoms", [10, 100])
 def test_gn(device, num_atoms):
+    import torch as pt
+    from torchmdnet.models.model import create_model
+    from torchmdnet.optimize import optimize
+    from torchmdnet.models.utils import dtype_mapping
     if not pt.cuda.is_available() and device == "cuda":
         pytest.skip("No GPU")