diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
index dafbb4ed50227..dc8c1e87fe18c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataSetLockManager.java
@@ -179,7 +179,7 @@ public AutoCloseDataSetLock readLock(LockLevel level, String... resources) {
       AutoCloseDataSetLock dirLock = getReadLock(level, resources);
       dirLock.setParentLock(volLock);
       if (openLockTrace) {
-        LOG.debug("Sub lock " + resources[0] + resources[1] + resources[2] + " parent lock " +
+        LOG.info("Sub lock " + resources[0] + resources[1] + resources[2] + " parent lock " +
             resources[0] + resources[1]);
       }
       return dirLock;
@@ -206,7 +206,7 @@ public AutoCloseDataSetLock writeLock(LockLevel level, String... resources) {
       AutoCloseDataSetLock dirLock = getWriteLock(level, resources);
       dirLock.setParentLock(volLock);
       if (openLockTrace) {
-        LOG.debug("Sub lock " + resources[0] + resources[1] + resources[2] + " parent lock " +
+        LOG.info("Sub lock " + resources[0] + resources[1] + resources[2] + " parent lock " +
             resources[0] + resources[1]);
       }
       return dirLock;
@@ -273,6 +273,9 @@ public void addLock(LockLevel level, String... resources) {
           new ReentrantReadWriteLock(isFair));
       lockMap.addLock(lockName, new ReentrantReadWriteLock(isFair));
     }
+    if (openLockTrace) {
+      LOG.info("Added {} lock, lock name: {}", level.name(), lockName);
+    }
   }
 
   @Override
@@ -281,6 +284,9 @@ public void removeLock(LockLevel level, String... resources) {
     try (AutoCloseDataSetLock lock = writeLock(level, resources)) {
       lockMap.removeLock(lockName);
     }
+    if (openLockTrace) {
+      LOG.info("Removed {} lock, lock name: {}", level.name(), lockName);
+    }
   }
 
   @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
index 934c5faee2f92..0e5aab062074f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
@@ -439,9 +439,9 @@ private synchronized void activateVolume(
       List<String> allSubDirNameForDataSetLock = datasetSubLockStrategy.getAllSubLockNames();
       for (String dir : allSubDirNameForDataSetLock) {
         lockManager.addLock(LockLevel.DIR, bp, ref.getVolume().getStorageID(), dir);
-        LOG.info("Added DIR lock for bpid:{}, volume storageid:{}, dir:{}",
-            bp, ref.getVolume().getStorageID(), dir);
       }
+      LOG.info("Added DIR lock for bpid:{}, volume storageid:{}.", bp,
+          ref.getVolume().getStorageID());
     }
     DatanodeStorage dnStorage = storageMap.get(sd.getStorageUuid());
     if (dnStorage != null) {
@@ -3297,9 +3297,8 @@ public void addBlockPool(String bpid, Configuration conf)
       List<String> allSubDirNameForDataSetLock = datasetSubLockStrategy.getAllSubLockNames();
       for (String dir : allSubDirNameForDataSetLock) {
         lockManager.addLock(LockLevel.DIR, bpid, v, dir);
-        LOG.info("Added DIR lock for bpid:{}, volume storageid:{}, dir:{}",
-            bpid, v, dir);
       }
+      LOG.info("Added DIR lock for bpid:{}, volume storageid:{}.", bpid, v);
     }
   }
   try {
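Note (not part of the patch): the FsDatasetImpl hunks replace the per-sub-directory "Added DIR lock" line with a single summary line per volume, so startup log volume scales with the number of volumes rather than volumes times sub-locks, while the DataSetLockManager hunks keep per-lock tracing behind the `openLockTrace` flag at INFO level. Below is a minimal sketch of that one-summary-line-per-volume logging pattern; the class, interface, and method names are illustrative stand-ins, not Hadoop APIs.

```java
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative sketch only: mirrors the patch's logging pattern with made-up names.
public class LockTraceLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LockTraceLoggingSketch.class);

  // Hypothetical stand-in for DataSetLockManager#addLock(LockLevel.DIR, ...).
  interface DirLockRegistry {
    void addDirLock(String bpid, String storageId, String dir);
  }

  static void addDirLocks(DirLockRegistry registry, String bpid, String storageId,
      List<String> subDirs) {
    for (String dir : subDirs) {
      // Still one lock registered per sub-directory...
      registry.addDirLock(bpid, storageId, dir);
    }
    // ...but only one parameterized INFO line per volume, as in the consolidated log.
    LOG.info("Added DIR lock for bpid:{}, volume storageid:{}.", bpid, storageId);
  }

  public static void main(String[] args) {
    addDirLocks((bpid, sid, dir) -> { /* no-op registry for the demo */ },
        "BP-1234", "DS-volume-1", List.of("subdir0", "subdir1", "subdir2"));
  }
}
```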