@@ -71,6 +71,7 @@ import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
@@ -271,7 +272,8 @@ public class TestNameNodeMetrics {
     File dataDir = new File(fsVolume.getBaseURI());
     long capacity = fsVolume.getCapacity();
     volumeReferences.close();
-    DataNodeTestUtils.injectDataDirFailure(dataDir);
+    File storageDir = new File(dataDir, Storage.STORAGE_DIR_CURRENT);
+    DataNodeTestUtils.injectDataDirFailure(storageDir);
     DataNodeTestUtils.waitForDiskError(dn, fsVolume);
     DataNodeTestUtils.triggerHeartbeat(dn);
     BlockManagerTestUtil.checkHeartbeat(bm);