diff --git a/deepmd/pt/train/training.py b/deepmd/pt/train/training.py
index 49eccd60dc..392bdb8a7a 100644
--- a/deepmd/pt/train/training.py
+++ b/deepmd/pt/train/training.py
@@ -689,7 +689,7 @@ def step(_step_id, task_key="Default") -> None:
                 )
                 loss.backward()
                 if self.gradient_max_norm > 0.0:
-                    grad_norm = torch.nn.utils.clip_grad_norm_(
+                    torch.nn.utils.clip_grad_norm_(
                         self.wrapper.parameters(),
                         self.gradient_max_norm,
                         error_if_nonfinite=True,
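
Note on the change: torch.nn.utils.clip_grad_norm_ rescales the gradients in place and returns the pre-clip total norm, so the grad_norm assignment dropped here was unused; with error_if_nonfinite=True the call itself raises if the norm is NaN or Inf. A minimal sketch of the call pattern, assuming an illustrative model, optimizer, and max-norm value that are not part of the patch:

import torch

# Illustrative model/optimizer stand-ins; the patch operates on self.wrapper instead.
model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
gradient_max_norm = 1.0  # assumed value for the sketch

loss = model(torch.randn(8, 4)).square().mean()
loss.backward()

if gradient_max_norm > 0.0:
    # clip_grad_norm_ modifies the gradients in place; its return value
    # (the total norm before clipping) can be discarded when it is not logged.
    torch.nn.utils.clip_grad_norm_(
        model.parameters(),
        gradient_max_norm,
        error_if_nonfinite=True,  # raise instead of clipping a NaN/Inf norm
    )

optimizer.step()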