Skip to content

Commit

Permalink
Remove stack trace captures from import (pytorch#97274)
Browse files Browse the repository at this point in the history
Summary:
Calls to this function without an argument will capture a stack trace at
import time. This is expensive; we can skip it by passing in a value.

Test Plan: Wait for tests

Differential Revision: D44244345

Pull Request resolved: pytorch#97274
Approved by: https://github.com/kiukchung
  • Loading branch information
jeffdunn authored and pytorchmergebot committed Mar 22, 2023
1 parent 9c144bc commit d779dad
Show file tree
Hide file tree
Showing 7 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion torch/distributed/elastic/agent/server/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
_TERMINAL_STATE_SYNC_ID = "torchelastic/agent/terminal_state"

DEFAULT_ROLE = "default"
log = get_logger()
log = get_logger(__name__)


@dataclass
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
from torch.distributed.elastic.utils import macros
from torch.distributed.elastic.utils.logging import get_logger

log = get_logger()
log = get_logger(__name__)

__all__ = [
"LocalElasticAgent",
Expand Down
2 changes: 1 addition & 1 deletion torch/distributed/elastic/multiprocessing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def trainer(a, b, c):
)
from torch.distributed.elastic.utils.logging import get_logger

log = get_logger()
log = get_logger(__name__)


def start_processes(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@

__all__ = ["ProcessFailure", "ChildFailedError", "record", "ErrorHandler", "get_error_handler"]

log = get_logger()
log = get_logger(__name__)


JSON = Dict
Expand Down
2 changes: 1 addition & 1 deletion torch/distributed/elastic/utils/distributed.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from torch.distributed.elastic.utils.logging import get_logger


log = get_logger()
log = get_logger(__name__)

_ADDRESS_IN_USE = "Address already in use"
_SOCKET_TIMEOUT = "Socket Timeout"
Expand Down
2 changes: 1 addition & 1 deletion torch/distributed/launcher/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

__all__ = ['LaunchConfig', 'elastic_launch', 'launch_agent']

logger = get_logger()
logger = get_logger(__name__)


@dataclass
Expand Down
2 changes: 1 addition & 1 deletion torch/distributed/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,7 @@ def main():
from torch.distributed.launcher.api import LaunchConfig, elastic_launch


log = get_logger()
log = get_logger(__name__)


def get_args_parser() -> ArgumentParser:
Expand Down

0 comments on commit d779dad

Please sign in to comment.