From baabec473202014c49e95bf1686281a1af1473e1 Mon Sep 17 00:00:00 2001
From: Dinghao Zhou
Date: Mon, 27 May 2024 11:28:24 +0800
Subject: [PATCH] fix comment in fsdp_tp_example.py

---
 distributed/tensor_parallelism/fsdp_tp_example.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distributed/tensor_parallelism/fsdp_tp_example.py b/distributed/tensor_parallelism/fsdp_tp_example.py
index dbab48c1b8..15052780c5 100644
--- a/distributed/tensor_parallelism/fsdp_tp_example.py
+++ b/distributed/tensor_parallelism/fsdp_tp_example.py
@@ -87,7 +87,7 @@
 dp_mesh = device_mesh["dp"]
 
 # For TP, input needs to be same across all TP ranks.
-# while for SP, input can be different across all ranks.
+# while for DP, input can be different across all ranks.
 # We will use dp_rank for setting the random seed
 # to mimic the behavior of the dataloader.
 dp_rank = dp_mesh.get_local_rank()