Browse Source

HDFS-7516. Fix findbugs warnings in hdfs-nfs project. Contributed by Brandon Li

(cherry picked from commit 42d8858c5d237c4d9ab439c570a17b7fcaf781c2)
Brandon Li, 10 years ago
parent commit 781a1e352c
14 changed files with 40 additions and 27 deletions
  1. 2 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
  2. 3 4
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java
  3. 2 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java
  4. 2 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
  5. 3 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java
  6. 3 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java
  7. 2 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java
  8. 3 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java
  9. 5 4
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java
  10. 3 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java
  11. 5 4
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java
  12. 3 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
  13. 2 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
  14. 2 0
      hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

+ 2 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.mount;
 
 import java.util.List;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
@@ -76,7 +77,7 @@ public class MountResponse {
       if (hostGroups.length > 0) {
         for (int j = 0; j < hostGroups.length; j++) {
           xdr.writeBoolean(true); // Value follows - yes
-          xdr.writeVariableOpaque(hostGroups[j].getBytes());
+          xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8));
         }
       }
       xdr.writeBoolean(false); // Value follows - no more group

+ 3 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java

@@ -22,6 +22,7 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
@@ -72,10 +73,8 @@ public class FileHandle {
       return;
     }
 
-    byte[] in = s.getBytes();
-    for (int i = 0; i < in.length; i++) {
-      digest.update(in[i]);
-    }
+    byte[] in = s.getBytes(Charsets.UTF_8);
+    digest.update(in);
 
     byte[] digestbytes = digest.digest();
     for (int i = 0; i < 16; i++) {

+ 2 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.XDR;
@@ -78,7 +79,7 @@ public class CREATE3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(), name.length());
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
     xdr.writeInt(mode);
     objAttr.serialize(xdr);
   }

+ 2 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -56,6 +57,6 @@ public class LINK3Request extends RequestWithHandle {
     handle.serialize(xdr);
     fromDirHandle.serialize(xdr);
     xdr.writeInt(fromName.length());
-    xdr.writeFixedOpaque(fromName.getBytes(), fromName.length());
+    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length());
   }
 }

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -53,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle {
   @VisibleForTesting
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -54,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
     objAttr.serialize(xdr);
   }
 }

+ 2 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsFileType;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3;
@@ -79,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(), name.length());
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
     objAttr.serialize(xdr);
     if (spec != null) {
       xdr.writeInt(spec.getSpecdata1());

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -46,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }

+ 5 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -66,10 +67,10 @@ public class RENAME3Request extends NFS3Request {
   @Override
   public void serialize(XDR xdr) {
     fromDirHandle.serialize(xdr);
-    xdr.writeInt(fromName.getBytes().length);
-    xdr.writeFixedOpaque(fromName.getBytes());
+    xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8));
     toDirHandle.serialize(xdr);
-    xdr.writeInt(toName.getBytes().length);
-    xdr.writeFixedOpaque(toName.getBytes());
+    xdr.writeInt(toName.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8));
   }
 }

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -46,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }

+ 5 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
@@ -62,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
     symAttr.serialize(xdr);
-    xdr.writeInt(symData.getBytes().length);
-    xdr.writeFixedOpaque(symData.getBytes());
+    xdr.writeInt(symData.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8));
   }
 }

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
 
+import org.apache.commons.io.Charsets;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
 
@@ -165,11 +166,11 @@ public final class XDR {
   }
 
   public String readString() {
-    return new String(readVariableOpaque());
+    return new String(readVariableOpaque(), Charsets.UTF_8);
   }
 
   public void writeString(String s) {
-    writeVariableOpaque(s.getBytes());
+    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
   }
 
   private void writePadding() {

+ 2 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.oncrpc.security;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */
@@ -93,7 +94,7 @@ public class CredentialsSys extends Credentials {
   @Override
   public void write(XDR xdr) {
     // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
-    mCredentialsLength = 20 + mHostName.getBytes().length;
+    mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length;
     // mAuxGIDs
     if (mAuxGIDs != null && mAuxGIDs.length > 0) {
       mCredentialsLength += mAuxGIDs.length * 4;

+ 2 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -335,6 +335,8 @@ Release 2.7.0 - UNRELEASED
 
     HDFS-7506. Consolidate implementation of setting inode attributes into a
     single class. (wheat9)
+    
+    HDFS-7516. Fix findbugs warnings in hdfs-nfs project. (brandonli)
 
 Release 2.6.1 - UNRELEASED