
HADOOP-7870. fix SequenceFile#createWriter with boolean createParent arg to respect createParent. Contributed by Jon Hsieh

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1@1208909 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 13 years ago
parent
commit
68614edce8

+ 3 - 0
CHANGES.txt

@@ -52,6 +52,9 @@ Release 1.1.0 - unreleased
     MAPREDUCE-2376. test-task-controller fails if run as a userid < 1000.
     (todd via eli)
 
+    HADOOP-7870. fix SequenceFile#createWriter with boolean
+    createParent arg to respect createParent. (Jon Hsieh via eli)
+
   IMPROVEMENTS
 
     MAPREDUCE-3008. [Gridmix] Improve cumulative CPU usage emulation for 

+ 14 - 9
src/core/org/apache/hadoop/io/SequenceFile.java

@@ -437,19 +437,24 @@ public class SequenceFile {
                                          "GzipCodec without native-hadoop code!");
     }
 
+
+    FSDataOutputStream fsos;
+    if (createParent) {
+      fsos = fs.create(name, true, bufferSize, replication, blockSize);
+    } else {
+      fsos = fs.createNonRecursive(name, true, bufferSize, replication,
+          blockSize, null);
+    }
+
     switch (compressionType) {
     case NONE:
-      return new Writer(conf, 
-          fs.createNonRecursive(name, true, bufferSize, replication, blockSize, null),
-          keyClass, valClass, metadata).ownStream();
+      return new Writer(conf, fsos, keyClass, valClass, metadata).ownStream();
     case RECORD:
-      return new RecordCompressWriter(conf, 
-          fs.createNonRecursive(name, true, bufferSize, replication, blockSize, null),
-          keyClass, valClass, codec, metadata).ownStream();
+      return new RecordCompressWriter(conf, fsos, keyClass, valClass, codec,
+          metadata).ownStream();
     case BLOCK:
-      return new BlockCompressWriter(conf,
-          fs.createNonRecursive(name, true, bufferSize, replication, blockSize, null),
-          keyClass, valClass, codec, metadata).ownStream();
+      return new BlockCompressWriter(conf, fsos, keyClass, valClass, codec,
+          metadata).ownStream();
     default:
       return null;
     }
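For context, here is a minimal caller-side sketch of the createWriter overload this patch fixes. The parameter order mirrors the new test added below; the path, key/value types, and size values are illustrative assumptions, not part of the patch.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.SequenceFile.Metadata;
    import org.apache.hadoop.io.Text;

    public class CreateParentExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        // "newDir" does not exist yet; illustrative path only.
        Path file = new Path("/tmp/seqfile-demo/newDir/part-00000");

        // With createParent=true the writer now goes through fs.create(), which
        // creates the missing parent directory; with createParent=false it uses
        // fs.createNonRecursive() and fails if the parent is absent.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, file,
            Text.class, Text.class, 4096, (short) 1, 67108864L,
            true /* createParent */, CompressionType.NONE, null, new Metadata());
        try {
          writer.append(new Text("key"), new Text("value"));
        } finally {
          writer.close();
        }
      }
    }

Before this change all three compression branches called fs.createNonRecursive() unconditionally, so the createParent argument had no effect; the patch hoists stream creation above the switch and selects the call based on the flag.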

+ 26 - 0
src/test/org/apache/hadoop/io/TestSequenceFile.java

@@ -26,6 +26,7 @@ import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.SequenceFile.Metadata;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -440,6 +441,31 @@ public class TestSequenceFile extends TestCase {
     assertFalse(reader2.next(text));
   }
 
+
+  public void testRecursiveSeqFileCreate() throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.getLocal(conf);
+    Path name = new Path(new Path(System.getProperty("test.build.data","."),
+        "recursiveCreateDir") , "file");
+    boolean createParent = false;
+
+    try {
+      SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
+          RandomDatum.class, 512, (short) 1, 4096, createParent,
+          CompressionType.NONE, null, new Metadata());
+      fail("Expected an IOException due to missing parent");
+    } catch (IOException ioe) {
+      // Expected
+    }
+
+    createParent = true;
+    SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
+        RandomDatum.class, 512, (short) 1, 4096, createParent,
+        CompressionType.NONE, null, new Metadata());
+
+    // should succeed, fails if exception thrown
+  }
+  
   /** For debugging and testing. */
   public static void main(String[] args) throws Exception {
     int count = 1024 * 1024;