diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
index 4850d247db423..eba84a726bd6b 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
@@ -534,6 +534,10 @@ Each metrics record contains tags such as SessionId and Hostname as additional i
 | `ProcessedCommandsOpNumOps` | Total number of processed commands operations |
 | `ProcessedCommandsOpAvgTime` | Average time of processed commands operations in milliseconds |
 | `NullStorageBlockReports` | Number of blocks in IBRs that failed due to null storage |
+| `AcquireDatasetReadLockNumOps` | Total number of acquiring dataset read lock operations |
+| `AcquireDatasetReadLockAvgTime` | Average time of acquiring dataset read lock operations in nanoseconds |
+| `AcquireDatasetWriteLockNumOps` | Total number of acquiring dataset write lock operations |
+| `AcquireDatasetWriteLockAvgTime` | Average time of acquiring dataset write lock operations in nanoseconds |

 FsVolume
 --------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 87e8eee681d1d..ef778791cfd9c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -512,7 +512,7 @@ private static Tracer createTracer(Configuration conf) {
     this.pipelineSupportSlownode = false;
     this.socketFactory = NetUtils.getDefaultSocketFactory(conf);
     this.dnConf = new DNConf(this);
-    this.dataSetLockManager = new DataSetLockManager(conf);
+    this.dataSetLockManager = new DataSetLockManager(conf, this);
     initOOBTimeout();
     storageLocationChecker = null;
     volumeChecker = new DatasetVolumeChecker(conf, new Timer());
@@ -535,7 +535,7 @@ private static Tracer createTracer(Configuration conf) {
     super(conf);
     this.tracer = createTracer(conf);
     this.fileIoProvider = new FileIoProvider(conf, this);
-    this.dataSetLockManager = new DataSetLockManager(conf);
+    this.dataSetLockManager = new DataSetLockManager(conf, this);
     this.blockScanner = new BlockScanner(this);
     this.lastDiskErrorCheck = 0;
     this.maxNumberOfBlocksToLog = conf.getLong(DFS_MAX_NUM_BLOCKS_TO_LOG_KEY,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
index 5579541eb72d8..3abcf12fc8b12 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
@@ -27,6 +27,7 @@
 import java.util.Stack;
 import java.util.concurrent.locks.ReentrantReadWriteLock;

+import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -40,6 +41,7 @@ public class DataSetLockManager implements DataNodeLockManager
+      List<DataNode> datanodes = cluster.getDataNodes();
+      assertEquals(datanodes.size(), 1);
+      DataNode datanode = datanodes.get(0);
+      MetricsRecordBuilder rb = getMetrics(datanode.getMetrics().name());
+      assertCounterGt("AcquireDatasetWriteLockNumOps", (long)1, rb);
+      assertCounterGt("AcquireDatasetReadLockNumOps",
+          (long)1, rb);
+    }
+  }
 }
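
The `AcquireDatasetReadLock*` / `AcquireDatasetWriteLock*` names documented in Metrics.md follow the usual metrics2 rate convention, where one rate metric is reported as a `NumOps`/`AvgTime` pair, which is exactly what the test assertions above check. As a hedged sketch only (not the actual `DataSetLockManager`/`DataNodeMetrics` changes, whose hunks are not reproduced here), the following shows how such a rate can be fed from lock-acquisition timing with `MutableRate` and `Time.monotonicNowNanos()`, consistent with the `org.apache.hadoop.util.Time` import added above; the class name `DatasetLockTimingSketch` and its fields are invented for illustration.

```java
// Illustrative sketch only; names are hypothetical and not taken from this patch.
// A MutableRate named "AcquireDatasetReadLock" is emitted as
// AcquireDatasetReadLockNumOps and AcquireDatasetReadLockAvgTime; feeding it
// nanosecond deltas makes AvgTime a nanosecond average, matching Metrics.md.
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableRate;
import org.apache.hadoop.util.Time;

public class DatasetLockTimingSketch {
  private final MetricsRegistry registry = new MetricsRegistry("DataNodeActivity");
  private final MutableRate acquireDatasetReadLock =
      registry.newRate("AcquireDatasetReadLock", "Time to acquire the dataset read lock");
  private final MutableRate acquireDatasetWriteLock =
      registry.newRate("AcquireDatasetWriteLock", "Time to acquire the dataset write lock");

  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);

  public void readLock() {
    long startNanos = Time.monotonicNowNanos();  // monotonic clock, nanosecond units
    lock.readLock().lock();
    acquireDatasetReadLock.add(Time.monotonicNowNanos() - startNanos);
  }

  public void readUnlock() {
    lock.readLock().unlock();
  }

  public void writeLock() {
    long startNanos = Time.monotonicNowNanos();
    lock.writeLock().lock();
    acquireDatasetWriteLock.add(Time.monotonicNowNanos() - startNanos);
  }

  public void writeUnlock() {
    lock.writeLock().unlock();
  }
}
```

Recording the delta in nanoseconds rather than milliseconds is the natural unit here, since an uncontended lock acquisition typically completes well under a millisecond and would otherwise round to zero.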