NumPy to Torch: For LR Finder (Lightning-AI#17264)
Co-authored-by: Jirka Borovec <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
3 people authored Apr 4, 2023
1 parent 6cbc9df commit 7879476
Showing 1 changed file with 6 additions and 4 deletions: src/lightning/pytorch/tuner/lr_finder.py
@@ -18,7 +18,6 @@
 from copy import deepcopy
 from typing import Any, cast, Dict, List, Optional, TYPE_CHECKING, Union

-import numpy as np
 import torch
 from lightning_utilities.core.imports import RequirementCache

@@ -184,8 +183,9 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1) -> Optional[float]
             The suggested initial learning rate to use, or `None` if a suggestion is not possible due to too few
             loss samples.
         """
-        losses = np.array(self.results["loss"][skip_begin:-skip_end])
-        losses = losses[np.isfinite(losses)]
+        losses = torch.tensor(self.results["loss"][skip_begin:-skip_end])
+        losses = losses[torch.isfinite(losses)]
+
         if len(losses) < 2:
             # computing np.gradient requires at least 2 points
             log.error(
@@ -197,7 +197,9 @@ def suggestion(self, skip_begin: int = 10, skip_end: int = 1) -> Optional[float]

         # TODO: When computing the argmin here, and some losses are non-finite, the expected indices could be
         # incorrectly shifted by an offset
-        min_grad = np.gradient(losses).argmin()
+        gradients = torch.gradient(losses)[0]  # Unpack the tuple
+        min_grad = torch.argmin(gradients).item()
+
         self._optimal_idx = min_grad + skip_begin
         return self.results["lr"][self._optimal_idx]

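For context, here is a minimal standalone sketch of what the updated suggestion logic computes after this change. The function name suggest_lr and the plain-list arguments are hypothetical stand-ins for the class attributes (self.results["lr"] / self.results["loss"]); the key point is that torch.gradient returns a tuple of tensors (one per dimension), unlike np.gradient, which returns a single ndarray for 1-D input, hence the [0] unpacking.

import torch


def suggest_lr(lrs, losses, skip_begin=10, skip_end=1):
    # Hypothetical helper mirroring the torch-based logic in this commit;
    # not part of the Lightning API.
    losses_t = torch.tensor(losses[skip_begin:-skip_end])
    losses_t = losses_t[torch.isfinite(losses_t)]  # drop NaN/inf loss samples

    if len(losses_t) < 2:
        # gradient computation needs at least 2 points
        return None

    # torch.gradient returns a tuple with one tensor per dimension,
    # so index [0] to get the 1-D gradient; np.gradient returned the array directly.
    gradients = torch.gradient(losses_t)[0]
    min_grad = torch.argmin(gradients).item()

    # Same caveat as the TODO above: filtering non-finite losses can shift this index.
    return lrs[min_grad + skip_begin]

Usage would look like suggest_lr(results["lr"], results["loss"]), returning the learning rate at the point of steepest negative loss gradient.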
