@@ -42,6 +42,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.DF;
 import org.apache.hadoop.fs.DU;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -839,14 +840,15 @@ public class FSDataset implements FSConstants, FSDatasetInterface {
   // Used for synchronizing access to usage stats
   private Object statsLock = new Object();
 
-  boolean supportAppends = false;
+  boolean supportAppends = true;
 
   /**
    * An FSDataset has a directory where it loads its data files.
    */
   public FSDataset(DataStorage storage, Configuration conf) throws IOException {
     this.maxBlocksPerDir = conf.getInt("dfs.datanode.numblocks", 64);
-    this.supportAppends = conf.getBoolean("dfs.support.append", false);
+    this.supportAppends = conf.getBoolean(DFSConfigKeys.DFS_SUPPORT_APPEND_KEY,
+                                          DFSConfigKeys.DFS_SUPPORT_APPEND_DEFAULT);
     FSVolume[] volArray = new FSVolume[storage.getNumStorageDirs()];
     for (int idx = 0; idx < storage.getNumStorageDirs(); idx++) {
       volArray[idx] = new FSVolume(storage.getStorageDir(idx).getCurrentDir(), conf);
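
For anyone applying this change, the snippet below is a minimal sketch, not part of the patch: it resolves the append flag the same way the new FSDataset constructor does, through the DFSConfigKeys constants the patch imports. The class name AppendFlagCheck is hypothetical, and the sketch assumes a standard Hadoop Configuration that picks up hdfs-site.xml from the classpath; the key string ("dfs.support.append") is taken from the line the patch removes.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

// Hypothetical helper (assumption, not in the patch): prints what
// dfs.support.append resolves to for the configuration on the classpath,
// using the same key/default constants the new constructor reads.
public class AppendFlagCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration(); // loads core-site.xml/hdfs-site.xml if present
    boolean supportAppends =
        conf.getBoolean(DFSConfigKeys.DFS_SUPPORT_APPEND_KEY,
                        DFSConfigKeys.DFS_SUPPORT_APPEND_DEFAULT);
    System.out.println("dfs.support.append resolves to " + supportAppends);
  }
}

Replacing the string literal and hard-coded default with the named constants keeps the datanode's behavior in sync with whatever DFSConfigKeys declares, so a later change to the default only has to be made in one place.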