
HADOOP-10482. Fix various findbugs warnings in hadoop-common. Contributed by Haohui Mai.

Haohui Mai 10 years ago
parent
commit
bbd6a32776
30 changed files with 118 additions and 145 deletions
  1. +2 -0     hadoop-common-project/hadoop-common/CHANGES.txt
  2. +10 -0    hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
  3. +2 -5     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  4. +2 -7     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
  5. +0 -3     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
  6. +4 -19    hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
  7. +1 -1     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
  8. +4 -4     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
  9. +8 -13    hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
 10. +1 -1     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
 11. +2 -2     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java
 12. +2 -0     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
 13. +0 -6     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
 14. +2 -0     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
 15. +2 -2     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
 16. +2 -0     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
 17. +1 -1     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
 18. +8 -7     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
 19. +0 -5     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java
 20. +1 -1     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java
 21. +9 -8     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java
 22. +10 -9    hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java
 23. +15 -15   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java
 24. +15 -15   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java
 25. +1 -1     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
 26. +1 -2     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java
 27. +1 -5     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
 28. +2 -0     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
 29. +7 -10    hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java
 30. +3 -3     hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -558,6 +558,8 @@ Release 2.7.0 - UNRELEASED
     HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws,
     hadoop-azure, and hadoop-openstack. (Li Lu via wheat9)
 
+    HADOOP-10482. Fix various findbugs warnings in hadoop-common. (wheat9)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

+ 10 - 0
hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

@@ -241,6 +241,16 @@
        <Method name="writeVLong" />
        <Bug pattern="SF_SWITCH_FALLTHROUGH" />
      </Match>
+     <Match>
+       <Class name="org.apache.hadoop.io.Text" />
+       <Method name="bytesToCodePoint" />
+       <Bug pattern="SF_SWITCH_NO_DEFAULT" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.util.PureJavaCrc32C" />
+       <Method name="update" />
+       <Bug pattern="SF_SWITCH_NO_DEFAULT" />
+     </Match>
     <!--
 	  The switch condition fall through is intentional and for performance
 	  purposes.

+ 2 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -1487,11 +1487,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param pattern new value
    */
   public void setPattern(String name, Pattern pattern) {
-    if (null == pattern) {
-      set(name, null);
-    } else {
-      set(name, pattern.pattern());
-    }
+    assert pattern != null : "Pattern cannot be null";
+    set(name, pattern.pattern());
   }
 
   /**

+ 2 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java

@@ -144,13 +144,8 @@ public class JavaKeyStoreProvider extends KeyProvider {
           // Provided Password file does not exist
           throw new IOException("Password file does not exists");
         }
-        if (pwdFile != null) {
-          InputStream is = pwdFile.openStream();
-          try {
-            password = IOUtils.toString(is).trim().toCharArray();
-          } finally {
-            is.close();
-          }
+        try (InputStream is = pwdFile.openStream()) {
+          password = IOUtils.toString(is).trim().toCharArray();
         }
       }
     }
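
Try-with-resources closes the stream on both the normal and the exceptional path, which is what the removed try/finally did by hand, and the dropped pwdFile != null test was redundant at that point (the existence check just above it already uses pwdFile), which is the sort of check findbugs flags. A minimal standalone sketch of the same pattern, with an illustrative file name:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class TryWithResourcesSketch {
      public static void main(String[] args) throws IOException {
        // The stream is closed automatically when the block exits,
        // normally or via an exception -- no finally block needed.
        try (InputStream is = Files.newInputStream(Paths.get("password.txt"))) {
          ByteArrayOutputStream buf = new ByteArrayOutputStream();
          int b;
          while ((b = is.read()) != -1) {
            buf.write(b);
          }
          char[] password = buf.toString("UTF-8").trim().toCharArray();
          System.out.println(password.length + " password characters read");
        }
      }
    }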

+ 0 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -2618,9 +2618,6 @@ public abstract class FileSystem extends Configured implements Closeable {
   private static FileSystem createFileSystem(URI uri, Configuration conf
       ) throws IOException {
     Class<?> clazz = getFileSystemClass(uri.getScheme(), conf);
-    if (clazz == null) {
-      throw new IOException("No FileSystem for scheme: " + uri.getScheme());
-    }
     FileSystem fs = (FileSystem)ReflectionUtils.newInstance(clazz, conf);
     fs.initialize(uri, conf);
     return fs;

+ 4 - 19
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

@@ -220,12 +220,7 @@ public class HarFileSystem extends FileSystem {
       return FileSystem.getDefaultUri(conf);
     }
     String authority = rawURI.getAuthority();
-    if (authority == null) {
-      throw new IOException("URI: " + rawURI
-          + " is an invalid Har URI since authority==null."
-          + "  Expecting har://<scheme>-<host>/<path>.");
-    }
- 
+
     int i = authority.indexOf('-');
     if (i < 0) {
       throw new IOException("URI: " + rawURI
@@ -489,19 +484,12 @@ public class HarFileSystem extends FileSystem {
   }
   
   static class Store {
-    public Store() {
-      begin = end = startHash = endHash = 0;
-    }
-    public Store(long begin, long end, int startHash, int endHash) {
+    public Store(long begin, long end) {
       this.begin = begin;
       this.end = end;
-      this.startHash = startHash;
-      this.endHash = endHash;
     }
     public long begin;
     public long end;
-    public int startHash;
-    public int endHash;
   }
   
   /**
@@ -594,7 +582,7 @@ public class HarFileSystem extends FileSystem {
     public HarStatus(String harString) throws UnsupportedEncodingException {
       String[] splits = harString.split(" ");
       this.name = decodeFileName(splits[0]);
-      this.isDir = "dir".equals(splits[1]) ? true: false;
+      this.isDir = "dir".equals(splits[1]);
       // this is equal to "none" if its a directory
       this.partName = splits[2];
       this.startIndex = Long.parseLong(splits[3]);
@@ -1167,11 +1155,8 @@ public class HarFileSystem extends FileSystem {
           int b = lin.readLine(line);
           read += b;
           readStr = line.toString().split(" ");
-          int startHash = Integer.parseInt(readStr[0]);
-          int endHash  = Integer.parseInt(readStr[1]);
           stores.add(new Store(Long.parseLong(readStr[2]), 
-              Long.parseLong(readStr[3]), startHash,
-              endHash));
+              Long.parseLong(readStr[3])));
           line.clear();
         }
       } catch (IOException ioe) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java

@@ -372,7 +372,7 @@ public class LocalDirAllocator {
         // Keep rolling the wheel till we get a valid path
         Random r = new java.util.Random();
         while (numDirsSearched < numDirs && returnPath == null) {
-          long randomPosition = Math.abs(r.nextLong()) % totalAvailable;
+          long randomPosition = (r.nextLong() >>> 1) % totalAvailable;
           int dir = 0;
           while (randomPosition > availableOnDisk[dir]) {
             randomPosition -= availableOnDisk[dir];
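
The reason for this change: Math.abs(Long.MIN_VALUE) overflows and returns Long.MIN_VALUE itself, so Math.abs(r.nextLong()) % totalAvailable could, on rare occasions, still be negative. Shifting the sign bit away with >>> 1 always yields a non-negative value. A standalone sketch of the edge case:

    public class AbsRandomSketch {
      public static void main(String[] args) {
        long worstCase = Long.MIN_VALUE;          // Random.nextLong() can return this
        System.out.println(Math.abs(worstCase));  // -9223372036854775808: abs overflows
        System.out.println(worstCase >>> 1);      // 4611686018427387904: always >= 0
      }
    }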

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java

@@ -143,13 +143,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
       switch (finalCrcType) {
         case CRC32:
           return new MD5MD5CRC32GzipFileChecksum(
-              Integer.valueOf(bytesPerCRC),
-              Integer.valueOf(crcPerBlock),
+              Integer.parseInt(bytesPerCRC),
+              Integer.parseInt(crcPerBlock),
               new MD5Hash(md5));
         case CRC32C:
           return new MD5MD5CRC32CastagnoliFileChecksum(
-              Integer.valueOf(bytesPerCRC),
-              Integer.valueOf(crcPerBlock),
+              Integer.parseInt(bytesPerCRC),
+              Integer.parseInt(crcPerBlock),
               new MD5Hash(md5));
         default:
           // we should never get here since finalCrcType will
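
Integer.parseInt parses straight to a primitive int, whereas Integer.valueOf parses and then boxes an Integer that the surrounding code immediately unboxes; findbugs flags the pointless allocation. Both parse identically, as a quick sketch shows:

    public class ParseVsValueOfSketch {
      public static void main(String[] args) {
        int a = Integer.parseInt("512");   // primitive result, no boxing
        int b = Integer.valueOf("512");    // boxes an Integer, then auto-unboxes
        System.out.println(a == b);        // true -- only the allocation differs
      }
    }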

+ 8 - 13
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java

@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.net.ConnectException;
 import java.net.URI;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.ftp.FTP;
@@ -101,17 +102,12 @@ public class FTPFileSystem extends FileSystem {
     if (userAndPassword == null) {
       userAndPassword = (conf.get("fs.ftp.user." + host, null) + ":" + conf
           .get("fs.ftp.password." + host, null));
-      if (userAndPassword == null) {
-        throw new IOException("Invalid user/passsword specified");
-      }
     }
     String[] userPasswdInfo = userAndPassword.split(":");
+    Preconditions.checkState(userPasswdInfo.length > 1,
+                             "Invalid username / password");
     conf.set(FS_FTP_USER_PREFIX + host, userPasswdInfo[0]);
-    if (userPasswdInfo.length > 1) {
-      conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
-    } else {
-      conf.set(FS_FTP_PASSWORD_PREFIX + host, null);
-    }
+    conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
     setConf(conf);
     this.uri = uri;
   }
@@ -293,7 +289,8 @@ public class FTPFileSystem extends FileSystem {
    */
   private boolean exists(FTPClient client, Path file) throws IOException {
     try {
-      return getFileStatus(client, file) != null;
+      getFileStatus(client, file);
+      return true;
     } catch (FileNotFoundException fnfe) {
       return false;
     }
@@ -333,10 +330,8 @@ public class FTPFileSystem extends FileSystem {
     if (dirEntries != null && dirEntries.length > 0 && !(recursive)) {
       throw new IOException("Directory: " + file + " is not empty.");
     }
-    if (dirEntries != null) {
-      for (int i = 0; i < dirEntries.length; i++) {
-        delete(client, new Path(absolute, dirEntries[i].getPath()), recursive);
-      }
+    for (FileStatus dirEntry : dirEntries) {
+      delete(client, new Path(absolute, dirEntry.getPath()), recursive);
     }
     return client.removeDirectory(pathName);
   }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java

@@ -57,7 +57,7 @@ class Ls extends FsCommand {
 		  
   
 
-  protected static final SimpleDateFormat dateFormat = 
+  protected final SimpleDateFormat dateFormat =
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
 
   protected int maxRepl = 3, maxLen = 10, maxOwner = 0, maxGroup = 0;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java

@@ -55,8 +55,8 @@ class Stat extends FsCommand {
     "in the specified format. Format accepts filesize in blocks (%b), group name of owner(%g), " +
     "filename (%n), block size (%o), replication (%r), user name of owner(%u), modification date (%y, %Y)\n";
 
-  protected static final SimpleDateFormat timeFmt;
-  static {
+  protected final SimpleDateFormat timeFmt;
+  {
     timeFmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     timeFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
   }
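
This change, like the Ls one above it, exists because SimpleDateFormat keeps mutable internal state and is not thread-safe: a static instance shared by every thread can garble concurrently formatted dates. Making it an instance field (and turning the static initializer into an instance initializer) scopes the formatter to one command object. If an instance could still be reached from several threads, a per-thread copy is a common alternative; a hedged sketch using a Java 7-style ThreadLocal:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class DateFormatSketch {
      // One formatter per thread; SimpleDateFormat itself is not thread-safe.
      private static final ThreadLocal<SimpleDateFormat> FMT =
          new ThreadLocal<SimpleDateFormat>() {
            @Override
            protected SimpleDateFormat initialValue() {
              return new SimpleDateFormat("yyyy-MM-dd HH:mm");
            }
          };

      public static String format(Date d) {
        return FMT.get().format(d);
      }

      public static void main(String[] args) {
        System.out.println(format(new Date()));
      }
    }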

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java

@@ -83,6 +83,8 @@ class Test extends FsCommand {
       case 'z':
         test = (item.stat.getLen() == 0);
         break;
+      default:
+        break;
     }
     if (!test) exitCode = 1;
   }

+ 0 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java

@@ -168,12 +168,6 @@ public abstract class HAAdmin extends Configured implements Tool {
   private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive)
       throws IOException  {
     Collection<String> targetIds = getTargetIds(targetNodeToActivate);
-    if(targetIds == null) {
-      errOut.println("transitionToActive: No target node in the "
-          + "current configuration");
-      printUsage(errOut, "-transitionToActive");
-      return true;
-    }
     targetIds.remove(targetNodeToActivate);
     for(String targetId : targetIds) {
       HAServiceTarget target = resolveTarget(targetId);

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java

@@ -310,6 +310,8 @@ public class SshFenceByTcpPort extends Configured
       case com.jcraft.jsch.Logger.FATAL:
         LOG.fatal(message);
         break;
+      default:
+        break;
       }
     }
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java

@@ -99,11 +99,11 @@ public class LongWritable implements WritableComparable<LongWritable> {
     
     @Override
     public int compare(WritableComparable a, WritableComparable b) {
-      return -super.compare(a, b);
+      return super.compare(b, a);
     }
     @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-      return -super.compare(b1, s1, l1, b2, s2, l2);
+      return super.compare(b2, s2, l2, b1, s1, l1);
     }
   }
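
Negating a comparator result is a known trap: compare() may legally return Integer.MIN_VALUE, and -Integer.MIN_VALUE overflows back to Integer.MIN_VALUE, so the "reversed" order is not reversed for that value. Swapping the arguments inverts the ordering without any arithmetic. A small illustration:

    public class NegatedCompareSketch {
      public static void main(String[] args) {
        int result = Integer.MIN_VALUE;      // a legal compare() return value
        System.out.println(-result);         // -2147483648: negation overflows, sign unchanged
        // Swapping arguments is the safe way to reverse an ordering:
        System.out.println(Long.compare(2L, 1L));  // 1
        System.out.println(Long.compare(1L, 2L));  // -1
      }
    }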
 

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java

@@ -584,6 +584,8 @@ public class Text extends BinaryComparable
           state = TRAIL_BYTE;
         }
         break;
+      default:
+        break;
       } // switch (state)
       count++;
     }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java

@@ -40,7 +40,7 @@ public class DecompressorStream extends CompressionInputStream {
   throws IOException {
     super(in);
 
-    if (in == null || decompressor == null) {
+    if (decompressor == null) {
       throw new NullPointerException();
     } else if (bufferSize <= 0) {
       throw new IllegalArgumentException("Illegal bufferSize");

+ 8 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -665,7 +665,7 @@ public abstract class Server {
         assert !running;
         readSelector.wakeup();
         try {
-          join();
+          super.join();
         } catch (InterruptedException ie) {
           Thread.currentThread().interrupt();
         }
@@ -1119,7 +1119,8 @@ public abstract class Server {
     private ByteBuffer data;
     private ByteBuffer dataLengthBuffer;
     private LinkedList<Call> responseQueue;
-    private volatile int rpcCount = 0; // number of outstanding rpcs
+    // number of outstanding rpcs
+    private AtomicInteger rpcCount = new AtomicInteger();
     private long lastContact;
     private int dataLength;
     private Socket socket;
@@ -1207,17 +1208,17 @@ public abstract class Server {
 
     /* Return true if the connection has no outstanding rpc */
     private boolean isIdle() {
-      return rpcCount == 0;
+      return rpcCount.get() == 0;
     }
     
     /* Decrement the outstanding RPC count */
     private void decRpcCount() {
-      rpcCount--;
+      rpcCount.decrementAndGet();
     }
     
     /* Increment the outstanding RPC count */
     private void incRpcCount() {
-      rpcCount++;
+      rpcCount.incrementAndGet();
     }
     
     private UserGroupInformation getAuthorizedUgi(String authorizedId)
@@ -2068,9 +2069,9 @@ public abstract class Server {
         LOG.debug("Ignoring socket shutdown exception", e);
       }
       if (channel.isOpen()) {
-        try {channel.close();} catch(Exception e) {}
+        IOUtils.cleanup(null, channel);
       }
-      try {socket.close();} catch(Exception e) {}
+      IOUtils.cleanup(null, socket);
     }
   }
 

+ 0 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java

@@ -86,11 +86,6 @@ public class GangliaContext31 extends GangliaContext {
       value + " from hostname" + hostName);
 
     String units = getUnits(name);
-    if (units == null) {
-      LOG.warn("Metric name " + name + ", value " + value
-        + " had 'null' units");
-      units = "";
-    }
     int slope = getSlope(name);
     int tmax = getTmax(name);
     int dmax = getDmax(name);

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java

@@ -55,7 +55,7 @@ public class CompositeContext extends AbstractMetricsContext {
     int nKids;
     try {
       String sKids = getAttribute(ARITY_LABEL);
-      nKids = Integer.valueOf(sKids);
+      nKids = Integer.parseInt(sKids);
     } catch (Exception e) {
       LOG.error("Unable to initialize composite metric " + contextName +
                 ": could not init arity", e);

+ 9 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterInt.java

@@ -23,23 +23,24 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicInteger;
+
 /**
  * A mutable int counter for implementing metrics sources
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MutableCounterInt extends MutableCounter {
-  private volatile int value;
+  private AtomicInteger value = new AtomicInteger();
 
   MutableCounterInt(MetricsInfo info, int initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
@@ -47,18 +48,18 @@ public class MutableCounterInt extends MutableCounter {
    * @param delta of the increment
    */
   public synchronized void incr(int delta) {
-    value += delta;
+    value.addAndGet(delta);
     setChanged();
   }
 
   public int value() {
-    return value;
+    return value.get();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addCounter(info(), value);
+      builder.addCounter(info(), value());
       clearChanged();
     }
   }
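
The warning behind this rewrite (and the matching ones in MutableCounterLong and the two gauges below) is that ++ on a volatile field is three separate steps -- read, add, write -- so concurrent increments can overwrite each other; volatile guarantees visibility, not atomicity. AtomicInteger performs the whole increment as one atomic operation. A self-contained demonstration of the lost-update race (a hypothetical test, not part of the patch):

    import java.util.concurrent.atomic.AtomicInteger;

    public class VolatileIncrementSketch {
      static volatile int unsafe = 0;
      static final AtomicInteger safe = new AtomicInteger();

      public static void main(String[] args) throws InterruptedException {
        Runnable work = new Runnable() {
          @Override
          public void run() {
            for (int i = 0; i < 100000; i++) {
              unsafe++;                 // read, add, write: not atomic
              safe.incrementAndGet();   // single atomic operation
            }
          }
        };
        Thread t1 = new Thread(work);
        Thread t2 = new Thread(work);
        t1.start(); t2.start();
        t1.join(); t2.join();
        System.out.println("volatile: " + unsafe);      // usually < 200000
        System.out.println("atomic:   " + safe.get());  // always 200000
      }
    }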

+ 10 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableCounterLong.java

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicLong;
+
 /**
  * A mutable long counter
  */
@@ -30,36 +32,35 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableCounterLong extends MutableCounter {
 
-  private volatile long value;
+  private AtomicLong value = new AtomicLong();
 
   MutableCounterLong(MetricsInfo info, long initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment the value by a delta
    * @param delta of the increment
    */
-  public synchronized void incr(long delta) {
-    value += delta;
+  public void incr(long delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   public long value() {
-    return value;
+    return value.get();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addCounter(info(), value);
+      builder.addCounter(info(), value());
       clearChanged();
     }
   }

+ 15 - 15
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeInt.java

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicInteger;
+
 /**
  * A mutable int gauge
  */
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableGaugeInt extends MutableGauge {
 
-  private volatile int value;
+  private AtomicInteger value = new AtomicInteger();
 
   MutableGaugeInt(MetricsInfo info, int initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   public int value() {
-    return value;
+    return value.get();
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment by delta
    * @param delta of the increment
    */
-  public synchronized void incr(int delta) {
-    value += delta;
+  public void incr(int delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   @Override
-  public synchronized void decr() {
-    --value;
-    setChanged();
+  public void decr() {
+    decr(1);
   }
 
   /**
    * decrement by delta
    * @param delta of the decrement
    */
-  public synchronized void decr(int delta) {
-    value -= delta;
+  public void decr(int delta) {
+    value.addAndGet(-delta);
     setChanged();
   }
 
@@ -76,14 +76,14 @@ public class MutableGaugeInt extends MutableGauge {
    * @param value to set
    */
   public void set(int value) {
-    this.value = value;
+    this.value.set(value);
     setChanged();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addGauge(info(), value);
+      builder.addGauge(info(), value());
       clearChanged();
     }
   }

+ 15 - 15
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGaugeLong.java

@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 
+import java.util.concurrent.atomic.AtomicLong;
+
 /**
  * A mutable long gauge
  */
@@ -30,44 +32,42 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 @InterfaceStability.Evolving
 public class MutableGaugeLong extends MutableGauge {
 
-  private volatile long value;
+  private AtomicLong value = new AtomicLong();
 
   MutableGaugeLong(MetricsInfo info, long initValue) {
     super(info);
-    this.value = initValue;
+    this.value.set(initValue);
   }
 
   public long value() {
-    return value;
+    return value.get();
   }
 
   @Override
-  public synchronized void incr() {
-    ++value;
-    setChanged();
+  public void incr() {
+    incr(1);
   }
 
   /**
    * Increment by delta
    * @param delta of the increment
    */
-  public synchronized void incr(long delta) {
-    value += delta;
+  public void incr(long delta) {
+    value.addAndGet(delta);
     setChanged();
   }
 
   @Override
-  public synchronized void decr() {
-    --value;
-    setChanged();
+  public void decr() {
+    decr(1);
   }
 
   /**
    * decrement by delta
    * @param delta of the decrement
    */
-  public synchronized void decr(long delta) {
-    value -= delta;
+  public void decr(long delta) {
+    value.addAndGet(-delta);
     setChanged();
   }
 
@@ -76,14 +76,14 @@ public class MutableGaugeLong extends MutableGauge {
    * @param value to set
    */
   public void set(long value) {
-    this.value = value;
+    this.value.set(value);
     setChanged();
   }
 
   @Override
   public void snapshot(MetricsRecordBuilder builder, boolean all) {
     if (all || changed()) {
-      builder.addGauge(info(), value);
+      builder.addGauge(info(), value());
       clearChanged();
     }
   }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java

@@ -287,7 +287,7 @@ public class NetUtils {
     if (fqHost == null) {
       try {
         fqHost = SecurityUtil.getByName(host).getHostName();
-        // slight race condition, but won't hurt 
+        // slight race condition, but won't hurt
         canonicalizedHostCache.put(host, fqHost);
       } catch (UnknownHostException e) {
         fqHost = host;

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMappingWithDependency.java

@@ -171,8 +171,7 @@ public class ScriptBasedMappingWithDependency  extends ScriptBasedMapping
 
     @Override
     public String toString() {
-      return super.toString() + ", " + dependencyScriptName != null ?
-          ("dependency script " + dependencyScriptName) : NO_SCRIPT;
+      return "dependency script " + dependencyScriptName;
     }
   }
 }
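
The original toString() looks like it null-checks dependencyScriptName, but + binds tighter than the ternary's != condition, so it parses as (super.toString() + ", " + dependencyScriptName) != null ? ... : ..., which is always true (and the concatenated prefix is discarded). The patch keeps only the branch that was ever taken. A standalone sketch of the precedence trap:

    public class TernaryPrecedenceSketch {
      public static void main(String[] args) {
        String name = null;
        // Parses as ("prefix, " + name) != null, which is always true:
        String broken = "prefix, " + name != null ? ("script " + name) : "none";
        System.out.println(broken);   // prints "script null", not "none"
        // Parenthesizing the intended condition restores the null check:
        String fixed = name != null ? ("script " + name) : "none";
        System.out.println(fixed);    // prints "none"
      }
    }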

+ 1 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java

@@ -367,10 +367,8 @@ public class LdapGroupsMapping
       return "";
     }
     
-    Reader reader = null;
-    try {
+    try (Reader reader = new FileReader(pwFile)) {
       StringBuilder password = new StringBuilder();
-      reader = new FileReader(pwFile);
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);
@@ -379,8 +377,6 @@ public class LdapGroupsMapping
       return password.toString().trim();
     } catch (IOException ioe) {
       throw new RuntimeException("Could not read password file: " + pwFile, ioe);
-    } finally {
-      IOUtils.cleanup(LOG, reader);
     }
   }
 }

+ 2 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java

@@ -195,6 +195,8 @@ public class ComparableVersion
                     case 'm':
                         value = "milestone";
                         break;
+                    default:
+                        break;
                 }
             }
             this.value = ALIASES.getProperty( value , value );

+ 7 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PrintJarMainClass.java

@@ -34,16 +34,13 @@ public class PrintJarMainClass {
    * @param args
    */
   public static void main(String[] args) {
-    try {
-      JarFile jar_file = new JarFile(args[0]);
-      if (jar_file != null) {
-        Manifest manifest = jar_file.getManifest();
-        if (manifest != null) {
-          String value = manifest.getMainAttributes().getValue("Main-Class");
-          if (value != null) {
-            System.out.println(value.replaceAll("/", "."));
-            return;
-          }
+    try (JarFile jar_file = new JarFile(args[0])) {
+      Manifest manifest = jar_file.getManifest();
+      if (manifest != null) {
+        String value = manifest.getMainAttributes().getValue("Main-Class");
+        if (value != null) {
+          System.out.println(value.replaceAll("/", "."));
+          return;
         }
       }
     } catch (Throwable e) {

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java

@@ -70,14 +70,14 @@ public class ServletUtil {
       throw new IOException("Invalid request has no " + param + " parameter");
     }
     
-    return Long.valueOf(paramStr);
+    return Long.parseLong(paramStr);
   }
 
   public static final String HTML_TAIL = "<hr />\n"
-    + "<a href='http://hadoop.apache.org/core'>Hadoop</a>, " 
+    + "<a href='http://hadoop.apache.org/core'>Hadoop</a>, "
     + Calendar.getInstance().get(Calendar.YEAR) + ".\n"
     + "</body></html>";
-  
+
   /**
    * HTML footer to be added in the jsps.
    * @return the HTML footer.