
svn merge -c 1335505 FIXES: HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1335507 13f79535-47bb-0310-9956-ffa450edef68
Robert Joseph Evans · 13 years ago · commit 979a4057f6

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -61,6 +61,8 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8328. Duplicate FileSystem Statistics object for 'file' scheme.
     (tomwhite)
 
+    HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

+ 1 - 1
hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java

@@ -117,7 +117,7 @@ public class HadoopArchives implements Tool {
     // will when running the mapreduce job.
     String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
     if (testJar != null) {
-      ((JobConf)conf).setJar(testJar);
+      this.conf.setJar(testJar);
     }
   }
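
Note: the removed cast is presumably what FindBugs flagged here (an unconfirmed-cast pattern such as BC_UNCONFIRMED_CAST); the conf field is already declared as a JobConf, so it can be used directly. A minimal sketch of the pattern, with illustrative names:

    // Illustrative sketch, not the actual HadoopArchives code: giving the
    // field its concrete type makes the cast (and the warning) unnecessary.
    import org.apache.hadoop.mapred.JobConf;

    class ArchiveDriver {
      private final JobConf conf;          // concrete type, not Configuration

      ArchiveDriver(JobConf conf) {
        this.conf = conf;
      }

      void setTestJar(String testJar) {
        if (testJar != null) {
          conf.setJar(testJar);            // no (JobConf) cast needed
        }
      }
    }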
 

+ 6 - 3
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java

@@ -129,10 +129,13 @@ public class DistCp extends Configured implements Tool {
 
     Job job = null;
     try {
-      metaFolder = createMetaFolderPath();
-      jobFS = metaFolder.getFileSystem(getConf());
+      synchronized(this) {
+        //Don't cleanup while we are setting up.
+        metaFolder = createMetaFolderPath();
+        jobFS = metaFolder.getFileSystem(getConf());
 
-      job = createJob();
+        job = createJob();
+      }
       createInputFileListing(job);
 
       job.submit();
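
Note: the new synchronized block presumably answers a FindBugs inconsistent-synchronization warning (IS2_INCONSISTENT_SYNC): DistCp's cleanup can run on another thread, so metaFolder and jobFS are now assigned under the same lock that cleanup takes. A self-contained sketch of the locking pattern, with illustrative names:

    // Illustrative sketch: setup and cleanup share one monitor, so cleanup
    // can never observe (or delete) a half-initialized staging area.
    import java.io.File;
    import java.io.IOException;

    class StagingArea {
      private File metaFolder;             // guarded by "this"

      void setUp() throws IOException {
        synchronized (this) {              // don't clean up while setting up
          metaFolder = File.createTempFile("meta", null);
        }
      }

      void cleanup() {
        synchronized (this) {              // same lock as setUp()
          if (metaFolder != null) {
            metaFolder.delete();
            metaFolder = null;
          }
        }
      }
    }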

+ 2 - 2
hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java

@@ -65,9 +65,9 @@ import org.apache.hadoop.mapreduce.lib.map.RegexMapper;
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
-  public static String SORT_COLUMNS = 
+  public static final String SORT_COLUMNS = 
     "logalizer.logcomparator.sort.columns";
-  public static String COLUMN_SEPARATOR = 
+  public static final String COLUMN_SEPARATOR = 
     "logalizer.logcomparator.column.separator";
   
   static {
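
Note: making the two public static fields final matches FindBugs' MS_SHOULD_BE_FINAL pattern (presumably the one flagged): a non-final public static field can be reassigned by any caller, which is never wanted for configuration-key constants like these.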

+ 31 - 0
hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml

@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.LoggedJob"/>
+    <Method name="getMapperTriesToSucceed"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.ZombieJob"/>
+    <Method name="getInputSplits"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+</FindBugsFilter>
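
Note: EI_EXPOSE_REP fires when a getter returns a reference to mutable internal state, letting callers modify it from outside. This filter suppresses the warning for the two rumen getters instead of changing them, presumably because returning the live representation is intentional there; other files in this commit (JobConfPropertyNames, StreamJob) take the opposite approach and fix the warning with a defensive copy.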

+ 10 - 0
hadoop-tools/hadoop-rumen/pom.xml

@@ -90,6 +90,16 @@
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
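
Note: this wires the exclusion file into the findbugs-maven-plugin so the filtered warnings no longer count against the module. With the plugin configured, the report can be generated with the plugin's standard goal, e.g.:

    mvn compile findbugs:findbugs

(The exact invocation used by Hadoop's QA build may differ.)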

+ 3 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.PriorityQueue;
@@ -59,7 +60,8 @@ public class DeskewedJobTraceReader implements Closeable {
   static final private Log LOG =
       LogFactory.getLog(DeskewedJobTraceReader.class);
 
-  static private class JobComparator implements Comparator<LoggedJob> {
+  static private class JobComparator implements Comparator<LoggedJob>, 
+  Serializable {
     @Override
     public int compare(LoggedJob j1, LoggedJob j2) {
       return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1
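
Note: this and the two comparator changes below address FindBugs' SE_COMPARATOR_SHOULD_BE_SERIALIZABLE pattern (presumably): collections such as the PriorityQueue used here are themselves Serializable, but serializing one throws NotSerializableException unless its comparator is Serializable too. A minimal sketch:

    // Illustrative sketch: a comparator that can travel with a serializable
    // collection, e.g. new PriorityQueue<Long>(11, new SubmitTimeOrder()).
    import java.io.Serializable;
    import java.util.Comparator;

    class SubmitTimeOrder implements Comparator<Long>, Serializable {
      private static final long serialVersionUID = 1L;

      @Override
      public int compare(Long a, Long b) {
        return a.compareTo(b);             // earlier submit time first
      }
    }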

+ 3 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public enum JobConfPropertyNames {
@@ -33,6 +35,6 @@ public enum JobConfPropertyNames {
   }
 
   public String[] getCandidates() {
-    return candidates;
+    return Arrays.copyOf(candidates, candidates.length);
   }
 }
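
Note: returning the candidates array directly exposes the enum's internal state (FindBugs EI_EXPOSE_REP); Arrays.copyOf hands each caller an independent copy, so mutating the returned array can no longer corrupt the shared constant.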

+ 3 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.io.Serializable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -68,7 +69,8 @@ public class LoggedNetworkTopology implements DeepCompare {
    * order.
    * 
    */
-  static class TopoSort implements Comparator<LoggedNetworkTopology> {
+  static class TopoSort implements Comparator<LoggedNetworkTopology>, 
+  Serializable {
     public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) {
       return t1.name.getValue().compareTo(t2.name.getValue());
     }
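
Note: same SE_COMPARATOR_SHOULD_BE_SERIALIZABLE fix as in DeskewedJobTraceReader above.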

+ 2 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -98,7 +99,7 @@ public class TraceBuilder extends Configured implements Tool {
      * history file names should result in the order of jobs' submission times.
      */
     private static class HistoryLogsComparator
-        implements Comparator<FileStatus> {
+        implements Comparator<FileStatus>, Serializable {
       @Override
       public int compare(FileStatus file1, FileStatus file2) {
         return file1.getPath().getName().compareTo(
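
Note: the third and last instance of the Serializable-comparator fix; see DeskewedJobTraceReader above.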

+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java

@@ -27,7 +27,7 @@ import org.apache.commons.lang.StringUtils;
  * //TODO There is no caching for saving memory.
  */
 public class WordListAnonymizerUtility {
-  public static final String[] KNOWN_WORDS = 
+  static final String[] KNOWN_WORDS = 
     new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"};
   
   /**
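
Note: dropping "public" narrows the mutable static array to package-private visibility, presumably to quiet FindBugs' mutable-static-array patterns (MS_MUTABLE_ARRAY / MS_PKGPROTECT); a final reference does not protect the array's contents, so restricting who can reach it is the available fix.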

+ 2 - 10
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java

@@ -93,16 +93,8 @@ public class NodeName implements AnonymizableDataType<String> {
   }
   
   public NodeName(String rName, String hName) {
-    rName = (rName == null) 
-            ? rName 
-            : rName.length() == 0 
-              ? null 
-              : rName;
-    hName = (hName == null) 
-            ? hName 
-            : hName.length() == 0 
-              ? null 
-              : hName;
+    rName = (rName == null || rName.length() == 0) ? null : rName;
+    hName = (hName == null || hName.length() == 0) ? null : hName;
     if (hName == null) {
       nodeName = rName;
       rackName = rName;
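
Note: a readability cleanup rather than a behavior change: both nested ternaries only normalized empty strings to null, which the rewritten one-line null-or-empty checks do identically.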

+ 30 - 0
hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml

@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <Or>
+      <Class name="org.apache.hadoop.streaming.PipeMapper" />
+      <Class name="org.apache.hadoop.streaming.PipeReducer"/>
+    </Or>
+    <Or>
+      <Method name="getFieldSeparator"/>
+      <Method name="getInputSeparator"/>
+    </Or>
+    <Bug pattern="EI_EXPOSE_REP"/>
+  </Match>
+</FindBugsFilter>
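
Note: unlike the rumen filter's <And> blocks, this file uses a single <Match> whose child clauses are ANDed together, with <Or> for the alternatives; the one rule therefore suppresses EI_EXPOSE_REP for all four class/method combinations (PipeMapper and PipeReducer crossed with getFieldSeparator and getInputSeparator).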

+ 10 - 0
hadoop-tools/hadoop-streaming/pom.xml

@@ -96,6 +96,16 @@
 
   <build>
     <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+        <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
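
Note: the same findbugs-maven-plugin wiring as in hadoop-rumen's pom.xml above, pointing at this module's own exclusion file.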

+ 8 - 6
hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

@@ -91,7 +91,7 @@ public class StreamJob implements Tool {
   @Deprecated
   public StreamJob(String[] argv, boolean mayExit) {
     this();
-    argv_ = argv;
+    argv_ = Arrays.copyOf(argv, argv.length);
     this.config_ = new Configuration();
   }
 
@@ -113,7 +113,7 @@ public class StreamJob implements Tool {
   @Override
   public int run(String[] args) throws Exception {
     try {
-      this.argv_ = args;
+      this.argv_ = Arrays.copyOf(args, args.length);
       init();
 
       preProcessArgs();
@@ -290,7 +290,7 @@ public class StreamJob implements Tool {
         LOG.warn("-file option is deprecated, please use generic option" +
         		" -files instead.");
 
-        String fileList = null;
+        StringBuffer fileList = new StringBuffer();
         for (String file : values) {
           packageFiles_.add(file);
           try {
@@ -298,13 +298,15 @@ public class StreamJob implements Tool {
             Path path = new Path(pathURI);
             FileSystem localFs = FileSystem.getLocal(config_);
             String finalPath = path.makeQualified(localFs).toString();
-            fileList = fileList == null ? finalPath : fileList + "," + finalPath;
+            if(fileList.length() > 0) {
+              fileList.append(',');
+            }
+            fileList.append(finalPath);
           } catch (Exception e) {
             throw new IllegalArgumentException(e);
           }
         }
-        config_.set("tmpfiles", config_.get("tmpfiles", "") +
-                                  (fileList == null ? "" : fileList));
+        config_.set("tmpfiles", config_.get("tmpfiles", "") + fileList);
         validate(packageFiles_);
       }