Browse Source

MAPREDUCE-2776. Fix some of the yarn findbug warnings. (siddharth seth via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-279@1153756 13f79535-47bb-0310-9956-ffa450edef68
Mahadev Konar 14 years ago
parent
commit
e9030313bb
49 changed files with 635 additions and 81 deletions
  1. 3 0
      mapreduce/CHANGES.txt
  2. 391 0
      mapreduce/dev-support/findbugs-exclude.xml
  3. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-app/pom.xml
  4. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-common/pom.xml
  5. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-core/pom.xml
  6. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-hs/pom.xml
  7. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-jobclient/pom.xml
  8. 1 0
      mapreduce/mr-client/hadoop-mapreduce-client-shuffle/pom.xml
  9. 5 0
      mapreduce/mr-client/pom.xml
  10. 12 0
      mapreduce/pom.xml
  11. 132 0
      mapreduce/yarn/dev-support/findbugs-exclude.xml
  12. 11 0
      mapreduce/yarn/pom.xml
  13. 1 0
      mapreduce/yarn/yarn-api/pom.xml
  14. 0 4
      mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java
  15. 3 0
      mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.java
  16. 1 0
      mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java
  17. 1 0
      mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueUserACLInfoPBImpl.java
  18. 1 0
      mapreduce/yarn/yarn-common/pom.xml
  19. 0 1
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ContainerLogAppender.java
  20. 3 3
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java
  21. 4 4
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java
  22. 3 3
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java
  23. 1 1
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java
  24. 1 2
      mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
  25. 1 0
      mapreduce/yarn/yarn-server/yarn-server-common/pom.xml
  26. 1 0
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/pom.xml
  27. 1 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
  28. 5 10
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DeletionService.java
  29. 1 15
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
  30. 1 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
  31. 1 3
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
  32. 1 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
  33. 2 4
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java
  34. 3 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java
  35. 8 3
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/FSDownload.java
  36. 1 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java
  37. 2 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
  38. 1 1
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/security/LocalizerTokenIdentifier.java
  39. 3 2
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
  40. 1 2
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
  41. 1 0
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java
  42. 1 0
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java
  43. 1 0
      mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
  44. 1 0
      mapreduce/yarn/yarn-server/yarn-server-resourcemanager/pom.xml
  45. 6 6
      mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMConfig.java
  46. 4 4
      mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKStore.java
  47. 1 0
      mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerNode.java
  48. 7 7
      mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
  49. 1 0
      mapreduce/yarn/yarn-server/yarn-server-tests/pom.xml

+ 3 - 0
mapreduce/CHANGES.txt

@@ -3,6 +3,9 @@ Hadoop MapReduce Change Log
 Trunk (unreleased changes)
 
   MAPREDUCE-279
+    
+    MAPREDUCE-2776. Fix some of the yarn findbug warnings. (siddharth 
+    seth via mahadev)
 
     Fix NPE in FifoScheduler. (mahadev)
 

+ 391 - 0
mapreduce/dev-support/findbugs-exclude.xml

@@ -0,0 +1,391 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<FindBugsFilter>
+     <Match>
+       <Package name="org.apache.hadoop.record.compiler.generated" />
+     </Match>
+     <Match>
+       <Bug pattern="EI_EXPOSE_REP" />
+     </Match>
+     <Match>
+       <Bug pattern="EI_EXPOSE_REP2" />
+     </Match>
+     <Match>
+       <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
+     </Match>
+     <Match>
+       <Class name="~.*_jsp" />
+       <Bug pattern="DLS_DEAD_LOCAL_STORE" />
+     </Match>
+     <Match>
+       <Class name="~.*_jspx" />
+       <Bug pattern="DLS_DEAD_LOCAL_STORE" />
+     </Match>
+     <Match>
+       <Field name="_jspx_dependants" />
+       <Bug pattern="UWF_UNWRITTEN_FIELD" />
+     </Match>
+     <!-- 
+       Inconsistent synchronization for Client.Connection.out
+       is intentional to make a connection to be closed instantly. 
+     --> 
+     <Match>
+       <Class name="org.apache.hadoop.ipc.Client$Connection" />
+       <Field name="out" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+     <!-- 
+       Ignore Cross Scripting Vulnerabilities
+     -->
+     <Match>
+       <Package name="~org.apache.hadoop.mapred.*" />
+       <Bug code="XSS" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.taskdetails_jsp" />
+       <Bug code="HRS" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.jobdetails_jsp"/>
+       <Bug pattern="HRS_REQUEST_PARAMETER_TO_HTTP_HEADER"/>
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.taskstats_jsp"/>
+       <Bug pattern="HRS_REQUEST_PARAMETER_TO_HTTP_HEADER"/>
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.jobtasks_jsp"/>
+       <Bug pattern="HRS_REQUEST_PARAMETER_TO_HTTP_HEADER"/>
+     </Match>
+     <!--
+       Ignore warnings where child class has the same name as
+       super class. Classes based on Old API shadow names from
+       new API. Should go off after HADOOP-1.0
+     -->
+     <Match>
+       <Class name="~org.apache.hadoop.mapred.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+     </Match>
+     <Match>
+       <Class name="~org.apache.hadoop.mapred.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+     </Match>
+     <Match>
+       <Class name="~org.apache.hadoop.mapred.lib.aggregate.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+     </Match>
+     <Match>
+       <Class name="~org.apache.hadoop.mapred.join.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.SequenceFileInputFilter$Filter" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+     </Match>
+     <Match>
+       <Class name="~org.apache.hadoop.util.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+     </Match>
+     <Match>
+       <Class name="~org.apache.hadoop.filecache.*" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+     </Match>
+     <!--
+       Ignore warnings for usage of System.exit. This is
+       required and have been well thought out
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.Child$2" />
+       <Method name="run" />
+       <Bug pattern="DM_EXIT" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.JobTracker" />
+       <Method name="addHostToNodeMapping" />
+       <Bug pattern="DM_EXIT" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.Task" />
+       <Or>
+       <Method name="done" />
+       <Method name="commit" />
+       <Method name="statusUpdate" />
+       </Or>
+       <Bug pattern="DM_EXIT" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.JobTracker" />
+       <Field name="clock" />
+       <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.Task$TaskReporter" />
+       <Method name="run" />
+       <Bug pattern="DM_EXIT" />
+     </Match>
+     <!--
+       We need to cast objects between old and new api objects
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.OutputCommitter" />
+       <Bug pattern="BC_UNCONFIRMED_CAST" />
+     </Match>
+     <!--
+       We intentionally do the get name from the inner class
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.TaskTracker$MapEventsFetcherThread" />
+       <Method name="run" />
+       <Bug pattern="IA_AMBIGUOUS_INVOCATION_OF_INHERITED_OR_OUTER_METHOD" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.FileOutputCommitter" />
+       <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.OutputCommitter" />
+       <Or>
+       <Method name="abortJob" />
+       <Method name="commitJob" />
+       <Method name="cleanupJob" />
+       </Or>
+       <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.lib.db.DBInputFormat$DBRecordReader" />
+       <Method name="next" />
+       <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
+     </Match>
+     <!--
+       Ignoring this warning as resolving this would need a non-trivial change in code 
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor" />
+       <Method name="configure" />
+       <Field name="maxNumItems" />
+       <Bug pattern="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD" />
+     </Match>
+     <!--
+       Comes from org.apache.jasper.runtime.ResourceInjector. Cannot do much.
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.jobqueue_005fdetails_jsp" />
+       <Field name="_jspx_resourceInjector" />
+       <Bug pattern="SE_BAD_FIELD" />
+     </Match>
+     <!--
+       Storing textInputFormat and then passing it as a parameter. Safe to ignore.
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob" />
+       <Method name="createValueAggregatorJob" />
+       <Bug pattern="DLS_DEAD_STORE_OF_CLASS_LITERAL" />
+     </Match>
+     <!--
+       Can remove this after the upgrade to findbugs1.3.8
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.lib.db.DBInputFormat" />
+       <Method name="getSplits" />
+       <Bug pattern="DLS_DEAD_LOCAL_STORE" />
+     </Match>
+     <!--
+       org.apache.hadoop.mapred.IndexCache is thread-safe. It does not need
+       synchronous access. 
+      -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.TaskTracker" />
+       <Field name="indexCache" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+    <!--
+      None of the following variables should be referenced by any thread
+      but the collection thread in MapTask
+    -->
+     <Match>
+       <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
+       <Field name="kvindex" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
+       <Field name="bufferRemaining" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer" />
+       <Field name="equator" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+
+    <!-- This is spurious. -->
+    <Match>
+      <Class name="org.apache.hadoop.mapred.MapTask$MapOutputBuffer$SpillThread" />
+      <Method name="run" />
+      <Bug pattern="UL_UNRELEASED_LOCK_EXCEPTION_PATH" />
+    </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.mapreduce.task.reduce.MergeThread" />
+       <Field name="inputs" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
+     <Match>
+       <Class name="org.apache.hadoop.mapred.JobTracker" />
+       <Method name="updateTaskTrackerStatus" />
+       <Bug pattern="DLS_DEAD_LOCAL_STORE" />
+     </Match>
+
+    <!--
+     This class is unlikely to get subclassed, so ignore
+    -->
+     <Match>
+       <Class name="org.apache.hadoop.mapreduce.task.reduce.MergeManager" />
+       <Bug pattern="SC_START_IN_CTOR" />
+     </Match>
+
+    <!--
+      Do not bother if equals is not implemented. We will not need it here
+    -->
+     <Match>
+      <Class name="org.apache.hadoop.mapreduce.task.reduce.ShuffleScheduler$Penalty" />
+      <Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.mapred.Task" />
+       <Method name="reportFatalError" />
+       <Bug pattern="DM_EXIT" />
+     </Match>
+
+     <!-- 
+        core changes 
+     -->
+     <Match>
+       <Class name="~org.apache.hadoop.*" />
+       <Bug code="MS" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.fs.FileSystem" />
+       <Method name="checkPath" />
+       <Bug pattern="ES_COMPARING_STRINGS_WITH_EQ" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.fs.kfs.KFSOutputStream" />
+       <Field name="path" />
+       <Bug pattern="URF_UNREAD_FIELD" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.fs.kfs.KosmosFileSystem" />
+       <Method name="initialize" />
+       <Bug pattern="DM_EXIT" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.io.Closeable" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_INTERFACE" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.security.AccessControlException" />
+       <Bug pattern="NM_SAME_SIMPLE_NAME_AS_SUPERCLASS" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.record.meta.Utils" />
+       <Method name="skip" />
+       <Bug pattern="BC_UNCONFIRMED_CAST" />
+     </Match>
+
+     <!--
+        The compareTo method is actually a dummy method that just
+        throws exceptions. So, no need to override equals. Ignore
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.record.meta.RecordTypeInfo" />
+       <Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.util.ProcfsBasedProcessTree" />
+       <Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
+     </Match>
+
+     <!--
+       Streaming, Examples
+     -->
+     <Match>
+       <Class name="org.apache.hadoop.streaming.StreamUtil$TaskId" />
+       <Bug pattern="URF_UNREAD_FIELD" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.examples.DBCountPageView" />
+       <Method name="verify" />
+       <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
+     </Match>
+
+     <Match>
+       <Class name="org.apache.hadoop.examples.ContextFactory" />
+       <Method name="setAttributes" />
+       <Bug pattern="OBL_UNSATISFIED_OBLIGATION" />
+     </Match>
+
+    <Match>
+       <Class name="org.apache.hadoop.mapred.TaskScheduler$QueueRefresher" />
+       <Bug pattern="SIC_INNER_SHOULD_BE_STATIC" />
+    </Match>
+
+    <Match>
+      <Class name="org.apache.hadoop.examples.terasort.TeraInputFormat$1" />
+      <Method name="run" />
+      <Bug pattern="DM_EXIT" />
+    </Match>
+    <Match>
+      <Class name="org.apache.hadoop.examples.terasort.TeraOutputFormat$TeraOutputCommitter"/>
+      <Bug pattern="NM_WRONG_PACKAGE_INTENTIONAL" />
+    </Match>
+    <Match>
+      <Class name="org.apache.hadoop.examples.terasort.Unsigned16" />
+      <Method name="getHexDigit"/>
+      <Bug pattern="ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT" />
+     </Match>
+
+     <!-- 
+	   False positive of FindBugs complaining that initializationPoller and
+	   started were accessed through
+	   CapacitySchedulerQueueRefresher.refreshQueues without explicitly holding
+	   the lock of CapacityTaskScheduler. The lock is in fact acquired by
+	   JobTracker prior to calling QueueManager.refreshQueues - this is
+	   necessary to ensure the order of locking (TaskScheduler ->
+	   QueueManager).
+     --> 
+    <Match>
+       <Class name="org.apache.hadoop.mapred.CapacityTaskScheduler" />
+       <Field name="initializationPoller" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+    </Match>
+    <Match>
+       <Class name="org.apache.hadoop.mapred.CapacityTaskScheduler" />
+       <Field name="started" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+    </Match>
+ </FindBugsFilter>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-app/pom.xml

@@ -13,6 +13,7 @@
   <properties>
     <install.file>${project.artifact.file}</install.file>
     <applink.base>${project.build.directory}/${project.name}</applink.base>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
 
   <dependencies>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-common/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
 
   <dependencies>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-core/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
   
   <dependencies>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-hs/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
 
   <dependencies>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-jobclient/pom.xml

@@ -13,6 +13,7 @@
   <properties>
     <install.file>${project.artifact.file}</install.file>
     <fork.mode>always</fork.mode>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
 
   <dependencies>

+ 1 - 0
mapreduce/mr-client/hadoop-mapreduce-client-shuffle/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <mr.basedir>${project.parent.parent.basedir}</mr.basedir>
   </properties>
 
   <dependencies>

+ 5 - 0
mapreduce/mr-client/pom.xml

@@ -11,6 +11,11 @@
   <name>hadoop-mapreduce-client</name>
   <packaging>pom</packaging>
 
+  <properties>
+    <mr.basedir>${project.parent.basedir}</mr.basedir>
+  </properties>
+
+
   <dependencyManagement>
     <dependencies>
       <!-- begin MNG-4223 workaround -->

+ 12 - 0
mapreduce/pom.xml

@@ -19,6 +19,7 @@
     <install.pom>${project.build.directory}/saner-pom.xml</install.pom>
     <install.file>${install.pom}</install.file>
     <fork.mode>once</fork.mode>
+    <mr.basedir>${basedir}</mr.basedir>
   </properties>
 
   <repositories>
@@ -322,6 +323,16 @@
           </execution>
         </executions>
       </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>findbugs-maven-plugin</artifactId>
+          <configuration>
+            <findbugsXmlOutput>true</findbugsXmlOutput>
+            <xmlOutput>true</xmlOutput>
+            <excludeFilterFile>${mr.basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+            <effort>Max</effort>
+          </configuration>
+        </plugin>
     </plugins>
   </build>
 
@@ -361,6 +372,7 @@
         <version>2.3.2</version>
         <configuration>
           <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
         </configuration>
       </plugin>
       <plugin>

+ 132 - 0
mapreduce/yarn/dev-support/findbugs-exclude.xml

@@ -0,0 +1,132 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<FindBugsFilter>
+  <!-- Ignore PB Generated Code -->
+  <Match>
+    <Package name="org.apache.hadoop.yarn.proto" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.ipc\.RpcProtos.*" />
+  </Match>
+
+  <!-- Ignore unchecked Event casts -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.nodemanager\.containermanager\.application\.ApplicationImpl.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.nodemanager\.containermanager\.container\.ContainerImpl.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.nodemanager\.containermanager\.localizer\.LocalizedResource.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.nodemanager\.containermanager\.localizer\.ResourceLocalizationService.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorImpl" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.rmapp\.attempt\.RMAppAttemptImpl.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl$AppRejectedTransition" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.rmcontainer\.RMContainerImpl.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.rmnode\.RMNodeImpl.*" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.scheduler\.capacity\.CapacityScheduler.*" />
+    <Method name="handle" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.scheduler\.fifo\.FifoScheduler.*" />
+    <Method name="handle" />
+    <Bug pattern="BC_UNCONFIRMED_CAST" />
+  </Match>
+
+  <!-- Ignore intentional switch fallthroughs -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerImpl" />
+    <Method name="finished" />
+    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer" />
+    <Method name="localizeFiles" />
+    <Bug pattern="SF_SWITCH_FALLTHROUGH" />
+  </Match>
+
+  <!-- Ignore some irrelevant serialization warnings -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceRetentionSet$LRUComparator" />
+    <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.exceptions.impl.pb.YarnRemoteExceptionPBImpl" />
+    <Field name="builder" />
+    <Bug pattern="SE_BAD_FIELD" />
+  </Match>
+  <Match>
+    <Class name="~org\.apache\.hadoop\.yarn\.util\.BuilderUtils.*" />
+    <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.resourcemanager.resource.Priority$Comparator" />
+    <Bug pattern="SE_COMPARATOR_SHOULD_BE_SERIALIZABLE" />
+  </Match>
+
+  <!-- Inconsistent sync warning - only start() is synchronized-->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.LogAggregationService" />
+    <Bug pattern="IS2_INCONSISTENT_SYNC" />
+  </Match>
+
+  <!-- Don't care if putIfAbsent value is ignored -->
+  <Match>
+    <Package name="org.apache.hadoop.yarn.factories.impl.pb" />
+    <Bug pattern="RV_RETURN_VALUE_OF_PUTIFABSENT_IGNORED" />
+  </Match>
+
+  <!-- Intended System.exit calls -->
+  <Match>
+    <Class name="org.apache.hadoop.yarn.webapp.Dispatcher$1" />
+    <Bug pattern="DM_EXIT" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.yarn.webapp.WebApps$Builder" />
+    <Bug pattern="DM_EXIT" />
+  </Match>
+
+ </FindBugsFilter>

+ 11 - 0
mapreduce/yarn/pom.xml

@@ -17,6 +17,7 @@
     <yarn.version>1.0-SNAPSHOT</yarn.version>
     <install.pom>${project.build.directory}/saner-pom.xml</install.pom>
     <install.file>${install.pom}</install.file>
+    <yarn.basedir>${basedir}</yarn.basedir>
   </properties>
 
   <repositories>
@@ -347,6 +348,16 @@
           </execution>
         </executions>
       </plugin>
+	  <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+		<configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${yarn.basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+	  </plugin>
     </plugins>
   </build>
 

+ 1 - 0
mapreduce/yarn/yarn-api/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.basedir}</yarn.basedir>
   </properties>
 
   <build>

+ 0 - 4
mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java

@@ -264,10 +264,6 @@ implements ApplicationReport {
     return new ApplicationIdPBImpl(applicationId);
   }
 
-  private ContainerProto convertToProtoFormat(Container t) {
-    return ((ContainerPBImpl) t).getProto();
-  }
-
   private Container convertFromProtoFormat(ContainerProto c) {
     return new ContainerPBImpl(c);
   }

+ 3 - 0
mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.java

@@ -151,6 +151,9 @@ public class ContainerIdPBImpl extends ProtoBase<ContainerIdProto> implements Co
 
   @Override
   public boolean equals(Object other) {
+    if (other == null) {
+      return false;
+    }
     if (other.getClass().isAssignableFrom(this.getClass())) {
       return this.getProto().equals(this.getClass().cast(other).getProto());
     }

+ 1 - 0
mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java

@@ -117,6 +117,7 @@ public class QueueInfoPBImpl extends ProtoBase<QueueInfoProto> implements
     maybeInitBuilder();
     if (queueName == null) {
       builder.clearQueueName();
+      return;
     }
     builder.setQueueName(queueName);
   }

+ 1 - 0
mapreduce/yarn/yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueUserACLInfoPBImpl.java

@@ -47,6 +47,7 @@ implements QueueUserACLInfo {
     maybeInitBuilder();
     if (queueName == null) {
       builder.clearQueueName();
+      return;
     }
     builder.setQueueName(queueName);
   }

+ 1 - 0
mapreduce/yarn/yarn-common/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.basedir}</yarn.basedir>
   </properties>
 
   <dependencies>

+ 0 - 1
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/ContainerLogAppender.java

@@ -37,7 +37,6 @@ public class ContainerLogAppender extends FileAppender {
   //so that log4j can configure it from the configuration(log4j.properties). 
   private int maxEvents;
   private Queue<LoggingEvent> tail = null;
-  private boolean isCleanup;
 
   @Override
   public void activateOptions() {

+ 3 - 3
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java

@@ -10,10 +10,10 @@ import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl;
 
 public class RecordFactoryProvider {
 
-  public static String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
-  public static String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
+  public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
+  public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
   
-  public static String RECORD_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.record.factory.class";
+  public static final String RECORD_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.record.factory.class";
   
   private static Configuration defaultConf;
   

+ 4 - 4
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java

@@ -18,11 +18,11 @@ import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl;
 public class RpcFactoryProvider {
   private static final Log LOG = LogFactory.getLog(RpcFactoryProvider.class);
   //TODO Move these keys to CommonConfigurationKeys
-  public static String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
-  public static String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
+  public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
+  public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
 
-  public static String RPC_CLIENT_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.client.factory.class";
-  public static String RPC_SERVER_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.server.factory.class";
+  public static final String RPC_CLIENT_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.client.factory.class";
+  public static final String RPC_SERVER_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.server.factory.class";
   
   private RpcFactoryProvider() {
     

+ 3 - 3
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/YarnRemoteExceptionFactoryProvider.java

@@ -10,10 +10,10 @@ import org.apache.hadoop.yarn.factories.impl.pb.YarnRemoteExceptionFactoryPBImpl
 
 public class YarnRemoteExceptionFactoryProvider {
 
-  public static String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
-  public static String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
+  public static final String RPC_SERIALIZER_KEY = "org.apache.yarn.ipc.rpc.serializer.property";
+  public static final String RPC_SERIALIZER_DEFAULT = "protocolbuffers";
   
-  public static String EXCEPTION_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.exception.factory.class";
+  public static final String EXCEPTION_FACTORY_CLASS_KEY = "org.apache.yarn.ipc.exception.factory.class";
   
   private YarnRemoteExceptionFactoryProvider() {
   }

+ 1 - 1
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java

@@ -99,7 +99,7 @@ public abstract class ResourceCalculatorPlugin extends Configured {
   @InterfaceStability.Unstable
   public abstract ProcResourceValues getProcResourceValues();
 
-  public class ProcResourceValues {
+  public static class ProcResourceValues {
     private final long cumulativeCpuTime;
     private final long physicalMemorySize;
     private final long virtualMemorySize;

+ 1 - 2
mapreduce/yarn/yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java

@@ -158,7 +158,7 @@ public class WebApps {
         throw new WebAppException("Error starting http server", e);
       }
       Injector injector = Guice.createInjector(webapp, new AbstractModule() {
-        @Override @SuppressWarnings("unchecked")
+        @Override
         protected void configure() {
           if (api != null) {
             bind(api).toInstance(application);
@@ -211,7 +211,6 @@ public class WebApps {
   }
 
   // Ditto
-  @SuppressWarnings("unchecked")
   public static <T> Builder<T> $for(T app) {
     return $for("", app);
   }

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-common/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
   </properties>
 
   <dependencies>

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-nodemanager/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
   </properties>
 
   <dependencies>

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java

@@ -134,7 +134,7 @@ public class DefaultContainerExecutor extends ContainerExecutor {
           new File(containerWorkDir.toUri().getPath()));
       launchCommandObjs.put(containerId, shExec);
       shExec.execute();
-    } catch (Exception e) {
+    } catch (IOException e) {
       if (null == shExec) {
         return -1;
       }

+ 5 - 10
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DeletionService.java

@@ -45,7 +45,7 @@ public class DeletionService extends AbstractService {
 
   private int debugDelay;
   private final ContainerExecutor exec;
-  private final ScheduledThreadPoolExecutor sched;
+  private ScheduledThreadPoolExecutor sched;
   private final FileContext lfs = getLfs();
   static final FileContext getLfs() {
     try {
@@ -56,16 +56,8 @@ public class DeletionService extends AbstractService {
   }
 
   public DeletionService(ContainerExecutor exec) {
-    this(exec, new ScheduledThreadPoolExecutor(1));
-    sched.setMaximumPoolSize(DEFAULT_MAX_DELETE_THREADS);
-    sched.setKeepAliveTime(60L, SECONDS);
-  }
-
-  public DeletionService(ContainerExecutor exec,
-      ScheduledThreadPoolExecutor sched) {
     super(DeletionService.class.getName());
     this.exec = exec;
-    this.sched = sched;
     this.debugDelay = 0;
   }
 
@@ -83,10 +75,13 @@ public class DeletionService extends AbstractService {
   @Override
   public void init(Configuration conf) {
     if (conf != null) {
-      sched.setMaximumPoolSize(
+      sched = new ScheduledThreadPoolExecutor(
           conf.getInt(NM_MAX_DELETE_THREADS, DEFAULT_MAX_DELETE_THREADS));
       debugDelay = conf.getInt(DEBUG_DELAY_SEC, 0);
+    } else {
+      sched = new ScheduledThreadPoolExecutor(DEFAULT_MAX_DELETE_THREADS);
     }
+    sched.setKeepAliveTime(60L, SECONDS);
     super.init(conf);
   }
 

+ 1 - 15
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java

@@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.server.nodemanager.NMConfig.NM_CONTAINER_EX
 import static org.apache.hadoop.yarn.server.nodemanager.NMConfig.NM_KEYTAB;
 
 import java.io.IOException;
-import java.util.Comparator;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ConcurrentSkipListMap;
@@ -158,20 +157,7 @@ public class NodeManager extends CompositeService {
     private final ConcurrentMap<ApplicationId, Application> applications =
         new ConcurrentHashMap<ApplicationId, Application>();
     private final ConcurrentMap<ContainerId, Container> containers =
-      new ConcurrentSkipListMap<ContainerId,Container>(
-          new Comparator<ContainerId>() {
-            @Override
-            public int compare(ContainerId a, ContainerId b) {
-              if (a.getAppId().getId() == b.getAppId().getId()) {
-                return a.getId() - b.getId();
-              }
-              return a.getAppId().getId() - b.getAppId().getId();
-            }
-            @Override
-            public boolean equals(Object other) {
-              return getClass().equals(other.getClass());
-            }
-          });
+        new ConcurrentSkipListMap<ContainerId, Container>();
 
     private final NodeHealthStatus nodeHealthStatus = RecordFactoryProvider
         .getRecordFactory(null).newRecordInstance(NodeHealthStatus.class);

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java

@@ -187,7 +187,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements
 
   @Override
   public byte[] getRMNMSharedSecret() {
-    return this.secretKeyBytes;
+    return this.secretKeyBytes.clone();
   }
 
   private NodeStatus getNodeStatus() {

+ 1 - 3
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java

@@ -29,7 +29,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerDiagnosticsUpdateEvent;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerInitEvent;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerKillEvent;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ApplicationLocalizationEvent;
@@ -186,7 +185,6 @@ public class ApplicationImpl implements Application {
     @Override
     public void transition(ApplicationImpl app, ApplicationEvent event) {
 
-      ApplicationInitedEvent initedEvent = (ApplicationInitedEvent) event;
       // Inform the logAggregator
       app.dispatcher.getEventHandler().handle(
             new LogAggregatorAppStartedEvent(app.appId, app.user,
@@ -214,7 +212,7 @@ public class ApplicationImpl implements Application {
             container.getContainerID()));
     }
   }
-
+  
   static final class ContainerDoneTransition implements
       SingleArcTransition<ApplicationImpl, ApplicationEvent> {
     @Override

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java

@@ -659,7 +659,7 @@ public class ContainerImpl implements Container {
       container.finished();
     }
   }
-
+  
   static class ContainerDiagnosticsUpdateTransition implements
       SingleArcTransition<ContainerImpl, ContainerEvent> {
     @Override

+ 2 - 4
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java

@@ -40,7 +40,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
 import org.apache.hadoop.yarn.service.AbstractService;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * The launcher for the containers. This service should be started only after
@@ -81,7 +80,8 @@ public class ContainersLauncher extends AbstractService
   @Override
   public void init(Configuration conf) {
     try {
-      FileContext lfs = FileContext.getLocalFSFileContext(conf);
+      //TODO Is this required?
+      FileContext.getLocalFSFileContext(conf);
     } catch (UnsupportedFileSystemException e) {
       throw new YarnException("Failed to start ContainersLauncher", e);
     }
@@ -104,8 +104,6 @@ public class ContainersLauncher extends AbstractService
       case LAUNCH_CONTAINER:
         Application app =
           context.getApplications().get(containerId.getAppId());
-        String appIdStr = ConverterUtils.toString(app.getAppId());
-        // TODO set in Application
       ContainerLaunch launch =
           new ContainerLaunch(getConfig(), dispatcher, exec, app,
               event.getContainer());

+ 3 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java

@@ -211,7 +211,9 @@ public class ContainerLocalizer {
               final LocalDirAllocator lda;
               switch (r.getVisibility()) {
               default:
-                LOG.warn("Unknown visibility: " + r.getVisibility());
+                LOG.warn("Unknown visibility: " + r.getVisibility()
+                        + ", Using userDirs");
+                //Falling back to userDirs for unknown visibility.
               case PUBLIC:
               case PRIVATE:
                 lda = userDirs;

+ 8 - 3
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/FSDownload.java

@@ -87,7 +87,6 @@ public class FSDownload implements Callable<Path> {
   }
 
   private long unpack(File localrsrc, File dst) throws IOException {
-    File destDir = new File(localrsrc.getParent());
     switch (resource.getType()) {
     case ARCHIVE:
       String lowerDst = dst.getName().toLowerCase();
@@ -101,12 +100,18 @@ public class FSDownload implements Callable<Path> {
         FileUtil.unTar(localrsrc, dst);
       } else {
         LOG.warn("Cannot unpack " + localrsrc);
-        localrsrc.renameTo(dst);
+        if (!localrsrc.renameTo(dst)) {
+            throw new IOException("Unable to rename file: [" + localrsrc
+              + "] to [" + dst + "]");
+        }
       }
       break;
     case FILE:
     default:
-      localrsrc.renameTo(dst);
+      if (!localrsrc.renameTo(dst)) {
+        throw new IOException("Unable to rename file: [" + localrsrc
+          + "] to [" + dst + "]");
+      }
       break;
     }
     return 0;

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java

@@ -67,7 +67,7 @@ class LocalResourcesTrackerImpl implements LocalResourcesTracker {
       break;
     case RELEASE:
       if (null == rsrc) {
-        LOG.info("Release unknown rsrc " + rsrc + " (discard)");
+        LOG.info("Release unknown rsrc null (discard)");
         return;
       }
       break;

+ 2 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java

@@ -357,7 +357,8 @@ public class ResourceLocalizationService extends AbstractService
 
       Application application =
           ((ApplicationLocalizationEvent) event).getApplication();
-      LocalResourcesTracker appLocalRsrcsTracker = appRsrc.remove(application);
+      LocalResourcesTracker appLocalRsrcsTracker =
+        appRsrc.remove(ConverterUtils.toString(application.getAppId()));
       if (null == appLocalRsrcsTracker) {
         LOG.warn("Removing uninitialized application " + application);
       }

+ 1 - 1
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/security/LocalizerTokenIdentifier.java

@@ -28,7 +28,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 
 public class LocalizerTokenIdentifier extends TokenIdentifier {
 
-  public static Text KIND = new Text("Localizer");
+  public static final Text KIND = new Text("Localizer");
 
   @Override
   public void write(DataOutput out) throws IOException {

+ 3 - 2
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java

@@ -106,7 +106,7 @@ public class ContainerLogsPage extends NMView {
           } catch (Exception e) {
             div.h1("Cannot find this log on the local disk.")._();
           }
-          div.h1(logFile.getName());
+          div.h1(logFile == null ? "Unknown LogFile" : logFile.getName());
           long start =
               $("start").isEmpty() ? -4 * 1024 : Long.parseLong($("start"));
           start = start < 0 ? logFile.length() + start : start;
@@ -128,6 +128,7 @@ public class ContainerLogsPage extends NMView {
                     ._(" for full log").br()._();
             }
             // TODO: Use secure IO Utils to avoid symlink attacks.
+            //TODO Fix findBugs close warning along with IOUtils change
             FileReader reader = new FileReader(logFile);
             char[] cbuf = new char[65536];
             reader.skip(start);
@@ -145,7 +146,7 @@ public class ContainerLogsPage extends NMView {
               writer().write(
                   "Exception reading log-file "
                       + StringUtils.stringifyException(e));
-          }
+          } 
         }
           div._();
         } else {

+ 1 - 2
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java

@@ -23,7 +23,6 @@ import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.NMConfig;
@@ -77,7 +76,7 @@ public class WebServer extends AbstractService {
     super.stop();
   }
 
-  public class NMWebApp extends WebApp implements NMWebParams {
+  public static class NMWebApp extends WebApp implements NMWebParams {
 
     private final ResourceView resourceView;
 

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java

@@ -143,6 +143,7 @@ public abstract class BaseContainerManagerTest {
             + ", baseDirs - " + baseDirs); 
       };
     };
+    delSrvc.init(conf);
 
     exec = createContainerExecutor();
     containerManager =

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java

@@ -264,6 +264,7 @@ public class TestContainerManager extends BaseContainerManagerTest {
       IOException {
     // Real del service
     delSrvc = new DeletionService(exec);
+    delSrvc.init(conf);
     containerManager = new ContainerManagerImpl(context, exec, delSrvc,
         nodeStatusUpdater, metrics);
     containerManager.init(conf);

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java

@@ -90,6 +90,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
   @Test
   public void testLocalFileDeletionAfterUpload() throws IOException {
     this.delSrvc = new DeletionService(createContainerExecutor());
+    this.delSrvc.init(conf);
     this.conf.set(NMConfig.NM_LOG_DIR, localLogDir.getAbsolutePath());
     this.conf.set(NMConfig.REMOTE_USER_LOG_DIR,
         this.remoteRootLogDir.getAbsolutePath());

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/pom.xml

@@ -12,6 +12,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
   </properties>
 
   <dependencies>

+ 6 - 6
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMConfig.java

@@ -74,15 +74,15 @@ public class RMConfig {
   public static final String RM_ACLS_ENABLED = YarnConfiguration.RM_PREFIX +
     "acls.enabled";
   
-  public static String RM_ADMIN_ACL = 
+  public static final String RM_ADMIN_ACL = 
     YarnConfiguration.RM_PREFIX + "admin.acl";
-  public static String DEFAULT_RM_ADMIN_ACL = "*"; 
+  public static final String DEFAULT_RM_ADMIN_ACL = "*"; 
 
-  public static String RM_NODES_INCLUDE_FILE = 
+  public static final String RM_NODES_INCLUDE_FILE = 
     YarnConfiguration.RM_PREFIX + "nodes.include";
-  public static String DEFAULT_RM_NODES_INCLUDE_FILE = "";
+  public static final String DEFAULT_RM_NODES_INCLUDE_FILE = "";
   
-  public static String RM_NODES_EXCLUDE_FILE = 
+  public static final String RM_NODES_EXCLUDE_FILE = 
     YarnConfiguration.RM_PREFIX + "nodes.exclude";
-  public static String DEFAULT_RM_NODES_EXCLUDE_FILE = "";
+  public static final String DEFAULT_RM_NODES_EXCLUDE_FILE = "";
 }

+ 4 - 4
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKStore.java

@@ -83,7 +83,7 @@ public class ZKStore implements Store {
    * of connection and session events.
    *
    */
-  private class ZKWatcher implements Watcher {
+  private static class ZKWatcher implements Watcher {
     @Override
     public void process(WatchedEvent arg0) {
     }
@@ -124,6 +124,7 @@ public class ZKStore implements Store {
     /** create a storage node and store it in zk **/
     if (!doneWithRecovery) return;
     NodeReportPBImpl nodeManagerInfo = createNodeManagerInfo(node);
+    // TODO FindBugs - will be fixed after the subsequent fixme
     byte[] bytes = nodeManagerInfo.getProto().toByteArray();
     // TODO: FIXMEVinodkv
 //    try {
@@ -228,8 +229,6 @@ public class ZKStore implements Store {
     @Override
     public synchronized void removeContainer(Container container) throws IOException {
       if (!doneWithRecovery) return;
-      
-      ContainerPBImpl containerPBImpl = (ContainerPBImpl) container;
       try { 
         zkClient.delete(APPS + containerPathFromContainerId(container.getId()),
             -1);
@@ -325,7 +324,7 @@ public class ZKStore implements Store {
     return rmState;
   }  
 
-  private class ApplicationInfoImpl implements ApplicationInfo {
+  private static class ApplicationInfoImpl implements ApplicationInfo {
     private ApplicationMaster master;
     private Container masterContainer;
 
@@ -477,6 +476,7 @@ public class ZKStore implements Store {
           continue;
         }
         int httpPort = Integer.valueOf(m.group(1));
+        // TODO: FindBugs Valid. Fix
         RMNode nm = new RMNodeImpl(node.getNodeId(), null,
             hostName, cmPort, httpPort,
             ResourceTrackerService.resolve(node.getNodeId().getHost()), 

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerNode.java

@@ -144,6 +144,7 @@ public class SchedulerNode {
     if (resource == null) {
       LOG.error("Invalid deduction of null resource for "
           + rmNode.getNodeAddress());
+      return;
     }
     Resources.subtractFrom(availableResource, resource);
     Resources.addTo(usedResource, resource);

+ 7 - 7
mapreduce/yarn/yarn-server/yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java

@@ -80,25 +80,25 @@ public class CapacitySchedulerConfiguration extends Configuration {
     PREFIX + "maximum-allocation-mb";
 
   @Private
-  public static int DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS = 10000;
+  public static final int DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS = 10000;
   
   @Private
-  public static int UNDEFINED = -1;
+  public static final int UNDEFINED = -1;
   
   @Private
-  public static int MINIMUM_CAPACITY_VALUE = 1;
+  public static final int MINIMUM_CAPACITY_VALUE = 1;
   
   @Private
-  public static int MAXIMUM_CAPACITY_VALUE = 100;
+  public static final int MAXIMUM_CAPACITY_VALUE = 100;
   
   @Private
-  public static int DEFAULT_USER_LIMIT = 100;
+  public static final int DEFAULT_USER_LIMIT = 100;
   
   @Private
-  public static float DEFAULT_USER_LIMIT_FACTOR = 1.0f;
+  public static final float DEFAULT_USER_LIMIT_FACTOR = 1.0f;
   
   @Private
-  public static String DEFAULT_ACL = "*";
+  public static final String DEFAULT_ACL = "*";
 
   @Private public static final String ENABLE_USER_METRICS =
       PREFIX +"user-metrics.enable";

+ 1 - 0
mapreduce/yarn/yarn-server/yarn-server-tests/pom.xml

@@ -11,6 +11,7 @@
 
   <properties>
     <install.file>${project.artifact.file}</install.file>
+    <yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
   </properties>
 
   <dependencies>