@@ -65,6 +65,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSInputStream;
+import org.apache.hadoop.hdfs.DFSOutputStream;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -601,6 +602,7 @@ public class TestFsck {
         out.write(randomString.getBytes());
         writeCount++;
       }
+      ((DFSOutputStream) out.getWrappedStream()).hflush();
       // We expect the filesystem to be HEALTHY and show one open file
       outStr = runFsck(conf, 0, true, topDir);
       System.out.println(outStr);
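
Note (not part of the patch): a minimal sketch of the flush pattern the added line relies on. When writing to HDFS, FSDataOutputStream wraps a DFSOutputStream, and hflush() pushes the buffered bytes out to the datanodes so that fsck can observe data in a file that is still open for write. The class name, method name, and parameters below are hypothetical, introduced only for illustration.

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSOutputStream;

// Hypothetical helper, for illustration only.
public class OpenFileFlushSketch {
  public static FSDataOutputStream writeWithoutClosing(
      FileSystem fs, Path path, byte[] data) throws Exception {
    FSDataOutputStream out = fs.create(path);
    out.write(data);
    // On HDFS the wrapped stream is a DFSOutputStream; hflush() makes the
    // written bytes visible to readers (and to fsck) without closing the file.
    ((DFSOutputStream) out.getWrappedStream()).hflush();
    return out; // left open on purpose; the caller closes it after the fsck run
  }
}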