
HDFS-490. Eliminate the deprecated warnings introduced by H-5438. Contributed by He Yongqiang

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hdfs/trunk@798638 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 16 years ago
parent
current commit
cbe2a069e3
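
The change is mechanical across all three test files: each call to the FileSystem.create overload that takes a boolean overwrite argument (deprecated by HADOOP-5438, which introduced CreateFlag) is replaced with the overload that takes an EnumSet<CreateFlag>. A minimal before/after sketch, assuming the 0.21-era trunk API; the class and method names here (CreateFlagMigration, openForWrite) are hypothetical and exist only for illustration:

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class CreateFlagMigration {
      // Hypothetical helper showing the substitution made in this commit.
      static FSDataOutputStream openForWrite(FileSystem fs, Configuration conf,
          Path path, FsPermission perm) throws IOException {
        // Before (deprecated since HADOOP-5438): the boolean argument means "overwrite".
        //   fs.create(path, perm, true,
        //       conf.getInt("io.file.buffer.size", 4096),
        //       fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);

        // After: the boolean is replaced by an explicit set of CreateFlag values.
        return fs.create(path, perm, EnumSet.of(CreateFlag.OVERWRITE),
            conf.getInt("io.file.buffer.size", 4096),
            fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
      }
    }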

+ 3 - 0
CHANGES.txt

@@ -102,6 +102,9 @@ Trunk (unreleased changes)
     HDFS-501. Use enum to define the constants in DataTransferProtocol.
     (szetszwo)
 
+    HDFS-490. Eliminate the deprecated warnings introduced by H-5438.
+    (He Yongqiang via szetszwo)
+
 Release 0.20.1 - Unreleased
 
   IMPROVEMENTS

+ 4 - 3
src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
+import java.util.EnumSet;
 import java.util.Random;
 
 import javax.security.auth.login.LoginException;
@@ -159,9 +160,9 @@ public class TestDFSPermission extends TestCase {
     // create the file/directory
     switch (op) {
     case CREATE:
-      FSDataOutputStream out = fs.create(name, permission, true, conf.getInt(
-          "io.file.buffer.size", 4096), fs.getDefaultReplication(), fs
-          .getDefaultBlockSize(), null);
+      FSDataOutputStream out = fs.create(name, permission, EnumSet.of(CreateFlag.OVERWRITE), 
+          conf.getInt("io.file.buffer.size", 4096),
+          fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
       out.close();
       break;
     case MKDIRS:
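
The same substitution recurs in the two remaining files. For reference, the boolean-to-flags correspondence implied by this hunk is sketched below; only the overwrite=true case appears in this commit, and the overwrite=false mapping to CreateFlag.CREATE is an assumption about the deprecated overload's forwarding behavior, not something this diff exercises:

    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;

    class OverwriteFlagMapping {
      // Maps the deprecated boolean "overwrite" argument to the flag set
      // accepted by the non-deprecated create() overload.
      static EnumSet<CreateFlag> toFlags(boolean overwrite) {
        return overwrite
            ? EnumSet.of(CreateFlag.OVERWRITE) // old create(..., true, ...), as in this diff
            : EnumSet.of(CreateFlag.CREATE);   // old create(..., false, ...); assumed, not in this diff
      }
    }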

+ 3 - 1
src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java

@@ -21,12 +21,14 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
+import java.util.EnumSet;
 import java.util.Random;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,7 +56,7 @@ public class TestFSInputChecker extends TestCase {
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
     // create and write a file that contains three blocks of data
     FSDataOutputStream stm = fileSys.create(name, new FsPermission((short)0777),
-        true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
+        EnumSet.of(CreateFlag.OVERWRITE), fileSys.getConf().getInt("io.file.buffer.size", 4096),
         NUM_OF_DATANODES, BLOCK_SIZE, null);
     stm.write(expected);
     stm.close();

+ 2 - 1
src/test/hdfs/org/apache/hadoop/security/TestPermission.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.security;
 
 import java.io.IOException;
+import java.util.EnumSet;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -86,7 +87,7 @@ public class TestPermission extends TestCase {
 
       FsPermission filePerm = new FsPermission((short)0444);
       FSDataOutputStream out = fs.create(new Path("/b1/b2/b3.txt"), filePerm,
-          true, conf.getInt("io.file.buffer.size", 4096),
+          EnumSet.of(CreateFlag.OVERWRITE), conf.getInt("io.file.buffer.size", 4096),
           fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
       out.write(123);
       out.close();