
Merge branch 'trunk' of https://git-wip-us.apache.org/repos/asf/hadoop into HDFS-7240

Xiaoyu Yao, 8 years ago
Parent commit 99474212cc
100 files changed, 1561 additions and 482 deletions
  1. dev-support/bin/create-release (+31, -5)
  2. hadoop-assemblies/pom.xml (+2, -2)
  3. hadoop-build-tools/pom.xml (+1, -1)
  4. hadoop-client-modules/hadoop-client-api/pom.xml (+2, -2)
  5. hadoop-client-modules/hadoop-client-check-invariants/pom.xml (+2, -2)
  6. hadoop-client-modules/hadoop-client-check-test-invariants/pom.xml (+2, -2)
  7. hadoop-client-modules/hadoop-client-integration-tests/pom.xml (+2, -2)
  8. hadoop-client-modules/hadoop-client-minicluster/pom.xml (+2, -2)
  9. hadoop-client-modules/hadoop-client-runtime/pom.xml (+2, -2)
  10. hadoop-client-modules/hadoop-client/pom.xml (+2, -2)
  11. hadoop-client-modules/pom.xml (+1, -1)
  12. hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml (+2, -2)
  13. hadoop-cloud-storage-project/pom.xml (+2, -2)
  14. hadoop-common-project/hadoop-annotations/pom.xml (+2, -2)
  15. hadoop-common-project/hadoop-auth-examples/pom.xml (+2, -2)
  16. hadoop-common-project/hadoop-auth/pom.xml (+7, -2)
  17. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java (+8, -0)
  18. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java (+2, -2)
  19. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java (+10, -2)
  20. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java (+28, -176)
  21. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java (+6, -5)
  22. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java (+197, -1)
  23. hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RolloverSignerSecretProvider.java (+2, -0)
  24. hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java (+1, -0)
  25. hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java (+1, -1)
  26. hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java (+73, -0)
  27. hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (+26, -0)
  28. hadoop-common-project/hadoop-common/pom.xml (+4, -4)
  29. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (+3, -3)
  30. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/AesCtrCryptoCodec.java (+6, -0)
  31. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java (+2, -1)
  32. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java (+1, -0)
  33. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java (+1, -0)
  34. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java (+11, -11)
  35. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java (+18, -9)
  36. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java (+10, -0)
  37. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java (+5, -0)
  38. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java (+15, -1)
  39. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java (+125, -46)
  40. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (+39, -2)
  41. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java (+4, -2)
  42. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java (+7, -2)
  43. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (+7, -2)
  44. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (+10, -7)
  45. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (+5, -4)
  46. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (+1, -1)
  47. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java (+32, -9)
  48. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (+47, -16)
  49. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (+1, -1)
  50. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java (+10, -13)
  51. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java (+1, -0)
  52. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java (+2, -2)
  53. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java (+2, -4)
  54. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (+32, -3)
  55. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java (+2, -2)
  56. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java (+1, -1)
  57. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java (+7, -1)
  58. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java (+7, -1)
  59. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java (+1, -1)
  60. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java (+9, -0)
  61. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java (+4, -0)
  62. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java (+35, -41)
  63. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java (+0, -1)
  64. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java (+34, -7)
  65. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (+1, -0)
  66. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java (+2, -6)
  67. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java (+11, -0)
  68. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java (+8, -0)
  69. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java (+11, -0)
  70. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java (+20, -0)
  71. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java (+1, -1)
  72. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java (+2, -2)
  73. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java (+6, -6)
  74. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedReadLock.java (+3, -3)
  75. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedReadWriteLock.java (+2, -2)
  76. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedWriteLock.java (+3, -3)
  77. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LogAdapter.java (+4, -0)
  78. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java (+30, -0)
  79. hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java (+3, -3)
  80. hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c (+2, -3)
  81. hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c (+1, -1)
  82. hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (+20, -0)
  83. hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md (+11, -2)
  84. hadoop-common-project/hadoop-common/src/site/markdown/GroupsMapping.md (+1, -0)
  85. hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md (+4, -0)
  86. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java (+1, -1)
  87. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (+82, -0)
  88. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithOpensslAesCtrCryptoCodec.java (+30, -2)
  89. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (+5, -5)
  90. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractCreateTest.java (+18, -0)
  91. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (+55, -0)
  92. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java (+1, -0)
  93. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java (+2, -2)
  94. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java (+3, -2)
  95. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java (+28, -0)
  96. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java (+11, -4)
  97. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/MetricsTestHelper.java (+49, -0)
  98. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java (+83, -15)
  99. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java (+63, -1)
  100. hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceOperations.java (+65, -0)

+ 31 - 5
dev-support/bin/create-release

@@ -50,6 +50,7 @@ function hadoop_abs
   declare obj=$1
   declare dir
   declare fn
+  declare ret
 
   if [[ ! -e ${obj} ]]; then
     return 1
@@ -62,7 +63,8 @@ function hadoop_abs
   fi
 
   dir=$(cd -P -- "${dir}" >/dev/null 2>/dev/null && pwd -P)
-  if [[ $? = 0 ]]; then
+  ret=$?
+  if [[ ${ret} = 0 ]]; then
     echo "${dir}${fn}"
     return 0
   fi
@@ -287,6 +289,7 @@ function usage
   echo "--mvncache=[path]       Path to the maven cache to use"
   echo "--native                Also build the native components"
   echo "--rc-label=[label]      Add this label to the builds"
+  echo "--security              Emergency security release"
   echo "--sign                  Use .gnupg dir to sign the artifacts and jars"
   echo "--version=[version]     Use an alternative version string"
 }
@@ -330,6 +333,9 @@ function option_parse
       --rc-label=*)
         RC_LABEL=${i#*=}
       ;;
+      --security)
+        SECURITYRELEASE=true
+      ;;
       --sign)
         SIGN=true
       ;;
@@ -397,6 +403,14 @@ function option_parse
       MVN_ARGS=("-Dmaven.repo.local=${MVNCACHE}")
     fi
   fi
+
+  if [[ "${SECURITYRELEASE}" = true ]]; then
+    if [[ ! -d "${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION}" ]]; then
+      hadoop_error "ERROR: ${BASEDIR}/hadoop-common-project/hadoop-common/src/site/markdown/release/${HADOOP_VERSION} does not exist."
+      hadoop_error "ERROR: This directory and its contents are required to be manually created for a security release."
+      exit 1
+    fi
+  fi
 }
 
 function dockermode
@@ -523,7 +537,7 @@ function makearelease
   big_console_header "Maven Build and Install"
 
   if [[ "${SIGN}" = true ]]; then
-    signflags=("-Psign" "-Dgpg.useagent=true" -Dgpg.executable="${GPG}")
+    signflags=("-Psign" "-Dgpg.useagent=true" "-Dgpg.executable=${GPG}")
   fi
 
   # Create SRC and BIN tarballs for release,
@@ -534,6 +548,14 @@ function makearelease
       "${signflags[@]}" \
       -DskipTests -Dtar $(hadoop_native_flags)
 
+  if [[ "${SECURITYRELEASE}" = true ]]; then
+    DOCFLAGS="-Pdocs"
+    hadoop_error "WARNING: Skipping automatic changelog and release notes generation due to --security"
+  else
+    DOCFLAGS="-Preleasedocs,docs"
+  fi
+
+
   # Create site for release
   # we need to do install again so that jdiff and
   # a few other things get registered in the maven
@@ -542,7 +564,8 @@ function makearelease
     "${MVN}" "${MVN_ARGS[@]}" install \
       site site:stage \
      -DskipTests \
-      -Pdist,src,releasedocs,docs
+      -Pdist,src \
+      "${DOCFLAGS}"
 
   big_console_header "Staging the release"
 
@@ -586,6 +609,7 @@ function makearelease
 function signartifacts
 {
   declare i
+  declare ret
 
   if [[ "${SIGN}" = false ]]; then
     for i in ${ARTIFACTS_DIR}/*; do
@@ -612,7 +636,8 @@ function signartifacts
     ${GPG} --verify --trustdb "${BASEDIR}/target/testkeysdb" \
       "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz.asc" \
         "${ARTIFACTS_DIR}/hadoop-${HADOOP_VERSION}${RC_LABEL}.tar.gz"
-    if [[ $? != 0 ]]; then
+    ret=$?
+    if [[ ${ret} != 0 ]]; then
       hadoop_error "ERROR: GPG key is not present in ${PUBKEYFILE}."
       hadoop_error "ERROR: This MUST be fixed. Exiting."
       exit 1
@@ -641,6 +666,7 @@ if [[ "${INDOCKER}" = true || "${DOCKERRAN}" = false ]]; then
   startgpgagent
 
   makearelease
+  releaseret=$?
 
   signartifacts
 
@@ -651,7 +677,7 @@ if [[ "${INDOCKER}" = true ]]; then
   exit $?
 fi
 
-if [[ $? == 0 ]]; then
+if [[ ${releaseret} == 0 ]]; then
   echo
   echo "Congratulations, you have successfully built the release"
   echo "artifacts for Apache Hadoop ${HADOOP_VERSION}${RC_LABEL}"

+ 2 - 2
hadoop-assemblies/pom.xml

@@ -23,11 +23,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-assemblies</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <name>Apache Hadoop Assemblies</name>
   <description>Apache Hadoop Assemblies</description>
 

+ 1 - 1
hadoop-build-tools/pom.xml

@@ -18,7 +18,7 @@
   <parent>
     <artifactId>hadoop-main</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <artifactId>hadoop-build-tools</artifactId>

+ 2 - 2
hadoop-client-modules/hadoop-client-api/pom.xml

@@ -18,11 +18,11 @@
 <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
-   <version>3.0.0-alpha4-SNAPSHOT</version>
+   <version>3.0.0-beta1-SNAPSHOT</version>
    <relativePath>../../hadoop-project</relativePath>
 </parent>
   <artifactId>hadoop-client-api</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Client</description>

+ 2 - 2
hadoop-client-modules/hadoop-client-check-invariants/pom.xml

@@ -18,11 +18,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-client-check-invariants</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>pom</packaging>
 
   <description>Enforces our invariants for the api and runtime client modules.</description>

+ 2 - 2
hadoop-client-modules/hadoop-client-check-test-invariants/pom.xml

@@ -18,11 +18,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-client-check-test-invariants</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>pom</packaging>
 
   <description>Enforces our invariants for the testing client modules.</description>

+ 2 - 2
hadoop-client-modules/hadoop-client-integration-tests/pom.xml

@@ -18,11 +18,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-client-integration-tests</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
 
   <description>Checks that we can use the generated artifacts</description>
   <name>Apache Hadoop Client Packaging Integration Tests</name>

+ 2 - 2
hadoop-client-modules/hadoop-client-minicluster/pom.xml

@@ -18,11 +18,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-client-minicluster</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Minicluster for Clients</description>

+ 2 - 2
hadoop-client-modules/hadoop-client-runtime/pom.xml

@@ -18,11 +18,11 @@
 <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
-   <version>3.0.0-alpha4-SNAPSHOT</version>
+   <version>3.0.0-beta1-SNAPSHOT</version>
    <relativePath>../../hadoop-project</relativePath>
 </parent>
   <artifactId>hadoop-client-runtime</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Client</description>

+ 2 - 2
hadoop-client-modules/hadoop-client/pom.xml

@@ -18,11 +18,11 @@
 <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project-dist</artifactId>
-   <version>3.0.0-alpha4-SNAPSHOT</version>
+   <version>3.0.0-beta1-SNAPSHOT</version>
    <relativePath>../../hadoop-project-dist</relativePath>
 </parent>
   <artifactId>hadoop-client</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
 
   <description>Apache Hadoop Client aggregation pom with dependencies exposed</description>
   <name>Apache Hadoop Client Aggregator</name>

+ 1 - 1
hadoop-client-modules/pom.xml

@@ -18,7 +18,7 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-client-modules</artifactId>

+ 2 - 2
hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml

@@ -18,11 +18,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-cloud-storage</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <description>Apache Hadoop Cloud Storage</description>

+ 2 - 2
hadoop-cloud-storage-project/pom.xml

@@ -20,11 +20,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-cloud-storage-project</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <description>Apache Hadoop Cloud Storage Project</description>
   <name>Apache Hadoop Cloud Storage Project</name>
   <packaging>pom</packaging>

+ 2 - 2
hadoop-common-project/hadoop-annotations/pom.xml

@@ -20,11 +20,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-annotations</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <description>Apache Hadoop Annotations</description>
   <name>Apache Hadoop Annotations</name>
   <packaging>jar</packaging>

+ 2 - 2
hadoop-common-project/hadoop-auth-examples/pom.xml

@@ -20,11 +20,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-auth-examples</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>war</packaging>
 
   <name>Apache Hadoop Auth Examples</name>

+ 7 - 2
hadoop-common-project/hadoop-auth/pom.xml

@@ -20,11 +20,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <artifactId>hadoop-auth</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
 
   <name>Apache Hadoop Auth</name>
@@ -181,6 +181,11 @@
       <version>${apacheds.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <scope>compile</scope>
+    </dependency>
   </dependencies>
 
   <build>

+ 8 - 0
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java

@@ -14,6 +14,8 @@
 package org.apache.hadoop.security.authentication.client;
 
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -59,6 +61,8 @@ import java.util.Map;
  * </pre>
  */
 public class AuthenticatedURL {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(AuthenticatedURL.class);
 
   /**
    * Name of the HTTP cookie used for the authentication token between the client and the server.
@@ -265,15 +269,19 @@ public class AuthenticatedURL {
               value = value.substring(0, separator);
             }
             if (value.length() > 0) {
+              LOG.trace("Setting token value to {} ({}), resp={}", value,
+                  token, respCode);
               token.set(value);
             }
           }
         }
       }
     } else if (respCode == HttpURLConnection.HTTP_NOT_FOUND) {
+      LOG.trace("Setting token value to null ({}), resp={}", token, respCode);
       token.set(null);
       throw new FileNotFoundException(conn.getURL().toString());
     } else {
+      LOG.trace("Setting token value to null ({}), resp={}", token, respCode);
       token.set(null);
       throw new AuthenticationException("Authentication failed" +
           ", URL: " + conn.getURL() +

+ 2 - 2
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java

@@ -293,10 +293,10 @@ public class KerberosAuthenticator implements Authenticator {
             GSSManager gssManager = GSSManager.getInstance();
             String servicePrincipal = KerberosUtil.getServicePrincipal("HTTP",
                 KerberosAuthenticator.this.url.getHost());
-            Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+            Oid oid = KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID;
             GSSName serviceName = gssManager.createName(servicePrincipal,
                                                         oid);
-            oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+            oid = KerberosUtil.GSS_KRB5_MECH_OID;
             gssContext = gssManager.createContext(serviceName, oid, null,
                                                   GSSContext.DEFAULT_LIFETIME);
             gssContext.requestCredDeleg(true);

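The two replacements above drop the reflective KerberosUtil.getOidInstance("...") lookups in favor of the static OID constants that KerberosUtil now defines (see the KerberosUtil.java hunk further down). A minimal, self-contained JGSS sketch of the same client-side calls follows; the class name, service principal, and locally defined constants are illustrative stand-ins, not part of the patch.

import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;

public class OidConstantsSketch {
  // Stand-ins for the constants the patched KerberosUtil exposes.
  static final Oid NT_GSS_KRB5_PRINCIPAL_OID = newOid("1.2.840.113554.1.2.2.1");
  static final Oid GSS_KRB5_MECH_OID = newOid("1.2.840.113554.1.2.2");

  private static Oid newOid(String dotted) {
    try {
      // Numeric OID strings always parse, so this cannot fail here.
      return new Oid(dotted);
    } catch (GSSException e) {
      throw new IllegalArgumentException(e);
    }
  }

  public static void main(String[] args) throws GSSException {
    GSSManager gssManager = GSSManager.getInstance();
    // Name the HTTP service principal and request a Kerberos-mechanism
    // context, as the patched KerberosAuthenticator does, but without the
    // old reflective getOidInstance("...") calls.
    GSSName serviceName = gssManager.createName(
        "HTTP/server.example.com@EXAMPLE.COM", NT_GSS_KRB5_PRINCIPAL_OID);
    GSSContext gssContext = gssManager.createContext(
        serviceName, GSS_KRB5_MECH_OID, null, GSSContext.DEFAULT_LIFETIME);
    gssContext.requestCredDeleg(true);
    System.out.println("Created GSS context for " + serviceName);
  }
}
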
+ 10 - 2
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java

@@ -516,6 +516,10 @@ public class AuthenticationFilter implements Filter {
       AuthenticationToken token;
       try {
         token = getToken(httpRequest);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Got token {} from httpRequest {}", token,
+              getRequestURL(httpRequest));
+        }
       }
       catch (AuthenticationException ex) {
         LOG.warn("AuthenticationToken ignored: " + ex.getMessage());
@@ -526,8 +530,8 @@
       if (authHandler.managementOperation(token, httpRequest, httpResponse)) {
         if (token == null) {
           if (LOG.isDebugEnabled()) {
-            LOG.debug("Request [{}] triggering authentication",
-                getRequestURL(httpRequest));
+            LOG.debug("Request [{}] triggering authentication. handler: {}",
+                getRequestURL(httpRequest), authHandler.getClass());
           }
           token = authHandler.authenticate(httpRequest, httpResponse);
           if (token != null && token != AuthenticationToken.ANONYMOUS) {
@@ -588,6 +592,10 @@
           doFilter(filterChain, httpRequest, httpResponse);
         }
       } else {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("managementOperation returned false for request {}."
+                  + " token: {}", getRequestURL(httpRequest), token);
+        }
         unauthorizedResponse = false;
       }
     } catch (AuthenticationException ex) {

+ 28 - 176
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java

@@ -28,31 +28,20 @@ import org.slf4j.LoggerFactory;
 
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
+import javax.security.auth.kerberos.KeyTab;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
+import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.HashMultimap;
-
-import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
-
 /**
  * The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO
  * authentication mechanism for HTTP.
@@ -76,60 +65,6 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
   public static final Logger LOG = LoggerFactory.getLogger(
       KerberosAuthenticationHandler.class);
 
-  /**
-   * Kerberos context configuration for the JDK GSS library.
-   */
-  private static class KerberosConfiguration extends Configuration {
-    private String keytab;
-    private String principal;
-
-    public KerberosConfiguration(String keytab, String principal) {
-      this.keytab = keytab;
-      this.principal = principal;
-    }
-
-    @Override
-    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-      Map<String, String> options = new HashMap<String, String>();
-      if (IBM_JAVA) {
-        options.put("useKeytab",
-            keytab.startsWith("file://") ? keytab : "file://" + keytab);
-        options.put("principal", principal);
-        options.put("credsType", "acceptor");
-      } else {
-        options.put("keyTab", keytab);
-        options.put("principal", principal);
-        options.put("useKeyTab", "true");
-        options.put("storeKey", "true");
-        options.put("doNotPrompt", "true");
-        options.put("useTicketCache", "true");
-        options.put("renewTGT", "true");
-        options.put("isInitiator", "false");
-      }
-      options.put("refreshKrb5Config", "true");
-      String ticketCache = System.getenv("KRB5CCNAME");
-      if (ticketCache != null) {
-        if (IBM_JAVA) {
-          options.put("useDefaultCcache", "true");
-          // The first value searched when "useDefaultCcache" is used.
-          System.setProperty("KRB5CCNAME", ticketCache);
-          options.put("renewTGT", "true");
-          options.put("credsType", "both");
-        } else {
-          options.put("ticketCache", ticketCache);
-        }
-      }
-      if (LOG.isDebugEnabled()) {
-        options.put("debug", "true");
-      }
-
-      return new AppConfigurationEntry[]{
-          new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
-              AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-              options), };
-    }
-  }
-
   /**
    * Constant that identifies the authentication mechanism.
    */
@@ -157,43 +92,6 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
   private String keytab;
   private GSSManager gssManager;
   private Subject serverSubject = new Subject();
-  private List<LoginContext> loginContexts = new ArrayList<LoginContext>();
-  /**
-   * HADOOP-10158 added support of running HTTP with multiple SPNs
-   * but implicit requirements is that they must come from the SAME local realm.
-   *
-   * This is a regression for use cases where HTTP service needs to run with
-   * with SPN from foreign realm, which is not supported after HADOOP-10158.
-   *
-   * HADOOP-13565 brings back support of SPNs from foreign realms
-   * without dependency on specific Kerberos domain_realm mapping mechanism.
-   *
-   * There are several reasons for not using native Kerberos domain_realm
-   * mapping:
-   * 1. As commented in KerberosUtil#getDomainRealm(), JDK's
-   * domain_realm mapping routines are private to the security.krb5
-   * package. As a result, KerberosUtil#getDomainRealm() always return local
-   * realm.
-   *
-   * 2. Server krb5.conf is not the only place that contains the domain_realm
-   * mapping in real deployment. Based on MIT KDC document here:
-   * https://web.mit.edu/kerberos/krb5-1.13/doc/admin/realm_config.html, the
-   * Kerberos domain_realm mapping can be implemented in one of the three
-   * mechanisms:
-   * 1) Server host-based krb5.conf on HTTP server
-   * 2) KDC-based krb5.conf on KDC server
-   * 3) DNS-based with TXT record with _kerberos prefix to the hostname.
-   *
-   * We choose to maintain domain_realm mapping based on HTTP principals
-   * from keytab. The mapping is built at login time with HTTP principals
-   * key-ed by server name and is used later to
-   * looked up SPNs based on server name from request for authentication.
-   * The multi-map implementation allows SPNs of same server from
-   * different realms.
-   *
-   */
-  private HashMultimap<String, String> serverPrincipalMap =
-      HashMultimap.create();
 
 
   /**
    * Creates a Kerberos SPNEGO authentication handler with the default
@@ -236,7 +134,8 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
       if (keytab == null || keytab.trim().length() == 0) {
         throw new ServletException("Keytab not defined in configuration");
       }
-      if (!new File(keytab).exists()) {
+      File keytabFile = new File(keytab);
+      if (!keytabFile.exists()) {
         throw new ServletException("Keytab does not exist: " + keytab);
       }
 
@@ -252,39 +151,19 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
       } else {
         spnegoPrincipals = new String[]{principal};
       }
-
+      KeyTab keytabInstance = KeyTab.getInstance(keytabFile);
+      serverSubject.getPrivateCredentials().add(keytabInstance);
+      for (String spnegoPrincipal : spnegoPrincipals) {
+        Principal krbPrincipal = new KerberosPrincipal(spnegoPrincipal);
+        LOG.info("Using keytab {}, for principal {}",
+            keytab, krbPrincipal);
+        serverSubject.getPrincipals().add(krbPrincipal);
+      }
       String nameRules = config.getProperty(NAME_RULES, null);
       if (nameRules != null) {
         KerberosName.setRules(nameRules);
       }
-      
-      for (String spnegoPrincipal : spnegoPrincipals) {
-        LOG.info("Login using keytab {}, for principal {}",
-            keytab, spnegoPrincipal);
-        final KerberosConfiguration kerberosConfiguration =
-            new KerberosConfiguration(keytab, spnegoPrincipal);
-        final LoginContext loginContext =
-            new LoginContext("", serverSubject, null, kerberosConfiguration);
-        try {
-          loginContext.login();
-        } catch (LoginException le) {
-          LOG.warn("Failed to login as [{}]", spnegoPrincipal, le);
-          throw new AuthenticationException(le);          
-        }
-        loginContexts.add(loginContext);
-        KerberosName kerbName = new KerberosName(spnegoPrincipal);
-        if (kerbName.getHostName() != null
-            && kerbName.getServiceName() != null
-            && kerbName.getServiceName().equals("HTTP")) {
-          boolean added = serverPrincipalMap.put(kerbName.getHostName(),
-              spnegoPrincipal);
-          LOG.info("Map server: {} to principal: [{}], added = {}",
-              kerbName.getHostName(), spnegoPrincipal, added);
-        } else {
-          LOG.warn("HTTP principal: [{}] is invalid for SPNEGO!",
-              spnegoPrincipal);
-        }
-      }
+
       try {
       try {
         gssManager = Subject.doAs(serverSubject,
             new PrivilegedExceptionAction<GSSManager>() {
@@ -310,14 +189,6 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
   public void destroy() {
     keytab = null;
     serverSubject = null;
-      try {
-        loginContext.logout();
-      } catch (LoginException ex) {
-        LOG.warn(ex.getMessage(), ex);
-      }
-    }
-    loginContexts.clear();
   }
   }
 
   /**
@@ -409,40 +280,20 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
           KerberosAuthenticator.NEGOTIATE.length()).trim();
       final Base64 base64 = new Base64(0);
       final byte[] clientToken = base64.decode(authorization);
-                                           .getCanonicalHostName();
       try {
       try {
+            KerberosUtil.getTokenServerName(clientToken);
+        if (!serverPrincipal.startsWith("HTTP/")) {
+          throw new IllegalArgumentException(
+              "Invalid server principal " + serverPrincipal +
+              "decoded from client request");
+        }
         token = Subject.doAs(serverSubject,
         token = Subject.doAs(serverSubject,
             new PrivilegedExceptionAction<AuthenticationToken>() {
-                  serverPrincipalMap.get(serverName);
               @Override
               @Override
               public AuthenticationToken run() throws Exception {
-                  LOG.trace("SPNEGO with server principals: {} for {}",
-                      serverPrincipals.toString(), serverName);
-                }
-                AuthenticationToken token = null;
-                Exception lastException = null;
-                for (String serverPrincipal : serverPrincipals) {
-                  try {
-                    token = runWithPrincipal(serverPrincipal, clientToken,
-                        base64, response);
-                  } catch (Exception ex) {
-                    lastException = ex;
-                    LOG.trace("Auth {} failed with {}", serverPrincipal, ex);
-                  } finally {
-                      if (token != null) {
-                        LOG.trace("Auth {} successfully", serverPrincipal);
-                        break;
-                    }
-                  }
-                }
-                if (token != null) {
-                  return token;
-                } else {
-                  throw new AuthenticationException(lastException);
-                }
+                return runWithPrincipal(serverPrincipal, clientToken,
+                      base64, response);
               }
             });
       } catch (PrivilegedActionException ex) {
@@ -451,6 +302,8 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
         } else {
           throw new AuthenticationException(ex.getException());
         }
+      } catch (Exception ex) {
+        throw new AuthenticationException(ex);
       }
     }
     return token;
@@ -458,8 +311,7 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
 
   private AuthenticationToken runWithPrincipal(String serverPrincipal,
       byte[] clientToken, Base64 base64, HttpServletResponse response) throws
-      IOException, AuthenticationException, ClassNotFoundException,
-      GSSException, IllegalAccessException, NoSuchFieldException {
+      IOException, GSSException {
     GSSContext gssContext = null;
     GSSCredential gssCreds = null;
     AuthenticationToken token = null;
@@ -467,11 +319,11 @@ public class KerberosAuthenticationHandler implements AuthenticationHandler {
       LOG.trace("SPNEGO initiated with server principal [{}]", serverPrincipal);
       gssCreds = this.gssManager.createCredential(
           this.gssManager.createName(serverPrincipal,
-              KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL")),
+              KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID),
           GSSCredential.INDEFINITE_LIFETIME,
           new Oid[]{
-              KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"),
-              KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID")},
+              KerberosUtil.GSS_SPNEGO_MECH_OID,
+              KerberosUtil.GSS_KRB5_MECH_OID },
           GSSCredential.ACCEPT_ONLY);
       gssContext = this.gssManager.createContext(gssCreds);
       byte[] serverToken = gssContext.acceptSecContext(clientToken, 0,

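The large removal above eliminates the per-principal LoginContext logins, the IBM/Oracle JAAS option juggling, and the host-to-principal multimap: the handler now seeds a single Subject with the keytab plus every SPNEGO principal, and resolves the server principal directly from the client token via KerberosUtil.getTokenServerName(). A standalone sketch of that Subject setup, with a hypothetical keytab path and principals, could look like this:

import java.io.File;
import java.security.Principal;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KeyTab;

public class KeytabSubjectSketch {
  public static void main(String[] args) {
    // Hypothetical keytab location and SPNEGO principals, for illustration.
    File keytabFile = new File("/etc/security/keytabs/http.keytab");
    String[] spnegoPrincipals = {
        "HTTP/server.example.com@EXAMPLE.COM",
        "HTTP/server.example.com@OTHER.REALM"  // foreign realms stay usable
    };

    // An unbound KeyTab credential lets the JDK look up keys on demand, so
    // with this approach no LoginContext.login() per principal is needed.
    Subject serverSubject = new Subject();
    serverSubject.getPrivateCredentials().add(KeyTab.getInstance(keytabFile));
    for (String spnegoPrincipal : spnegoPrincipals) {
      Principal krbPrincipal = new KerberosPrincipal(spnegoPrincipal);
      serverSubject.getPrincipals().add(krbPrincipal);
    }
    System.out.println("Principals on the server subject: "
        + serverSubject.getPrincipals());
  }
}
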
+ 6 - 5
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/MultiSchemeAuthenticationHandler.java

@@ -186,11 +186,12 @@ public class MultiSchemeAuthenticationHandler implements
     String authorization =
         request.getHeader(HttpConstants.AUTHORIZATION_HEADER);
     if (authorization != null) {
-      for (String scheme : schemeToAuthHandlerMapping.keySet()) {
-        if (AuthenticationHandlerUtil.matchAuthScheme(scheme, authorization)) {
-          AuthenticationHandler handler =
-              schemeToAuthHandlerMapping.get(scheme);
-          AuthenticationToken token = handler.authenticate(request, response);
+      for (Map.Entry<String, AuthenticationHandler> entry :
+          schemeToAuthHandlerMapping.entrySet()) {
+        if (AuthenticationHandlerUtil.matchAuthScheme(
+            entry.getKey(), authorization)) {
+          AuthenticationToken token =
+              entry.getValue().authenticate(request, response);
           logger.trace("Token generated with type {}", token.getType());
           return token;
         }

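The rewritten loop above walks schemeToAuthHandlerMapping.entrySet() so the scheme and its handler are read in one pass, rather than calling get(scheme) a second time inside the loop as the old code did. A small self-contained illustration of the idiom, with plain strings standing in for the real AuthenticationHandler values and startsWith standing in for matchAuthScheme:

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetIterationSketch {
  public static void main(String[] args) {
    // Hypothetical scheme-to-handler mapping, keyed by HTTP auth scheme.
    Map<String, String> schemeToHandler = new LinkedHashMap<>();
    schemeToHandler.put("Negotiate", "kerberos-handler");
    schemeToHandler.put("Basic", "ldap-handler");

    String authorization = "Basic dXNlcjpwYXNz";

    // entrySet() yields key and value together, avoiding the extra map
    // lookup that iterating keySet() and calling get() would cost.
    for (Map.Entry<String, String> entry : schemeToHandler.entrySet()) {
      if (authorization.startsWith(entry.getKey())) {
        System.out.println("Dispatching to " + entry.getValue());
        break;
      }
    }
  }
}
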
+ 197 - 1
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

@@ -21,15 +21,20 @@ import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.ByteBuffer;
+import java.nio.charset.IllegalCharsetNameException;
 import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
+import java.util.NoSuchElementException;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -50,7 +55,24 @@ public class KerberosUtil {
       ? "com.ibm.security.auth.module.Krb5LoginModule"
       : "com.sun.security.auth.module.Krb5LoginModule";
   }
-  
+
+  public static final Oid GSS_SPNEGO_MECH_OID =
+      getNumericOidInstance("1.3.6.1.5.5.2");
+  public static final Oid GSS_KRB5_MECH_OID =
+      getNumericOidInstance("1.2.840.113554.1.2.2");
+  public static final Oid NT_GSS_KRB5_PRINCIPAL_OID =
+      getNumericOidInstance("1.2.840.113554.1.2.2.1");
+
+  // numeric oids will never generate a GSSException for a malformed oid.
+  // use to initialize statics.
+  private static Oid getNumericOidInstance(String oidName) {
+    try {
+      return new Oid(oidName);
+    } catch (GSSException ex) {
+      throw new IllegalArgumentException(ex);
+    }
+  }
+
   public static Oid getOidInstance(String oidName)
       throws ClassNotFoundException, GSSException, NoSuchFieldException,
       IllegalAccessException {
@@ -255,4 +277,178 @@ public class KerberosUtil {
   public static boolean hasKerberosTicket(Subject subject) {
     return !subject.getPrivateCredentials(KerberosTicket.class).isEmpty();
   }
+
+  /**
+   * Extract the TGS server principal from the given gssapi kerberos or spnego
+   * wrapped token.
+   * @param rawToken bytes of the gss token
+   * @return String of server principal
+   * @throws IllegalArgumentException if token is undecodable
+   */
+  public static String getTokenServerName(byte[] rawToken) {
+    // subsequent comments include only relevant portions of the kerberos
+    // DER encoding that will be extracted.
+    DER token = new DER(rawToken);
+    // InitialContextToken ::= [APPLICATION 0] IMPLICIT SEQUENCE {
+    //     mech   OID
+    //     mech-token  (NegotiationToken or InnerContextToken)
+    // }
+    DER oid = token.next();
+    if (oid.equals(DER.SPNEGO_MECH_OID)) {
+      // NegotiationToken ::= CHOICE {
+      //     neg-token-init[0] NegTokenInit
+      // }
+      // NegTokenInit ::= SEQUENCE {
+      //     mech-token[2]     InitialContextToken
+      // }
+      token = token.next().get(0xa0, 0x30, 0xa2, 0x04).next();
+      oid = token.next();
+    }
+    if (!oid.equals(DER.KRB5_MECH_OID)) {
+      throw new IllegalArgumentException("Malformed gss token");
+    }
+    // InnerContextToken ::= {
+    //     token-id[1]
+    //     AP-REQ
+    // }
+    if (token.next().getTag() != 1) {
+      throw new IllegalArgumentException("Not an AP-REQ token");
+    }
+    // AP-REQ ::= [APPLICATION 14] SEQUENCE {
+    //     ticket[3]      Ticket
+    // }
+    DER ticket = token.next().get(0x6e, 0x30, 0xa3, 0x61, 0x30);
+    // Ticket ::= [APPLICATION 1] SEQUENCE {
+    //     realm[1]       String
+    //     sname[2]       PrincipalName
+    // }
+    // PrincipalName ::= SEQUENCE {
+    //     name-string[1] SEQUENCE OF String
+    // }
+    String realm = ticket.get(0xa1, 0x1b).getAsString();
+    DER names = ticket.get(0xa2, 0x30, 0xa1, 0x30);
+    StringBuilder sb = new StringBuilder();
+    while (names.hasNext()) {
+      if (sb.length() > 0) {
+        sb.append('/');
+      }
+      sb.append(names.next().getAsString());
+    }
+    return sb.append('@').append(realm).toString();
+  }
+
+  // basic ASN.1 DER decoder to traverse encoded byte arrays.
+  private static class DER implements Iterator<DER> {
+    static final DER SPNEGO_MECH_OID = getDER(GSS_SPNEGO_MECH_OID);
+    static final DER KRB5_MECH_OID = getDER(GSS_KRB5_MECH_OID);
+
+    private static DER getDER(Oid oid) {
+      try {
+        return new DER(oid.getDER());
+      } catch (GSSException ex) {
+        // won't happen.  a proper OID is encodable.
+        throw new IllegalArgumentException(ex);
+      }
+    }
+
+    private final int tag;
+    private final ByteBuffer bb;
+
+    DER(byte[] buf) {
+      this(ByteBuffer.wrap(buf));
+    }
+
+    DER(ByteBuffer srcbb) {
+      tag = srcbb.get() & 0xff;
+      int length = readLength(srcbb);
+      bb = srcbb.slice();
+      bb.limit(length);
+      srcbb.position(srcbb.position() + length);
+    }
+
+    int getTag() {
+      return tag;
+    }
+
+    // standard ASN.1 encoding.
+    private static int readLength(ByteBuffer bb) {
+      int length = bb.get();
+      if ((length & (byte)0x80) != 0) {
+        int varlength = length & 0x7f;
+        length = 0;
+        for (int i=0; i < varlength; i++) {
+          length = (length << 8) | (bb.get() & 0xff);
+        }
+      }
+      return length;
+    }
+
+    DER choose(int subtag) {
+      while (hasNext()) {
+        DER der = next();
+        if (der.getTag() == subtag) {
+          return der;
+        }
+      }
+      return null;
+    }
+
+    DER get(int... tags) {
+      DER der = this;
+      for (int i=0; i < tags.length; i++) {
+        int expectedTag = tags[i];
+        // lookup for exact match, else scan if it's sequenced.
+        if (der.getTag() != expectedTag) {
+          der = der.hasNext() ? der.choose(expectedTag) : null;
+        }
+        if (der == null) {
+          StringBuilder sb = new StringBuilder("Tag not found:");
+          for (int ii=0; ii <= i; ii++) {
+            sb.append(" 0x").append(Integer.toHexString(tags[ii]));
+          }
+          throw new IllegalStateException(sb.toString());
+        }
+      }
+      return der;
+    }
+
+    String getAsString() {
+      try {
+        return new String(bb.array(), bb.arrayOffset() + bb.position(),
+            bb.remaining(), "UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new IllegalCharsetNameException("UTF-8"); // won't happen.
+      }
+    }
+
+    @Override
+    public int hashCode() {
+      return 31 * tag + bb.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      return (o instanceof DER) &&
+          tag == ((DER)o).tag && bb.equals(((DER)o).bb);
+    }
+
+    @Override
+    public boolean hasNext() {
+      // it's a sequence or an embedded octet.
+      return ((tag & 0x30) != 0 || tag == 0x04) && bb.hasRemaining();
+    }
+
+    @Override
+    public DER next() {
+      if (!hasNext()) {
+        throw new NoSuchElementException();
+      }
+      return new DER(bb);
+    }
+
+    @Override
+    public String toString() {
+      return "[tag=0x"+Integer.toHexString(tag)+" bb="+bb+"]";
+    }
+  }
 }

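A minimal usage sketch for the new KerberosUtil.getTokenServerName() helper added above (illustrative only, not part of the commit; the class name is made up, and a real token would arrive base64-encoded in an HTTP "Authorization: Negotiate ..." header, as exercised by TestKerberosUtil further down):

import java.util.Base64;
import org.apache.hadoop.security.authentication.util.KerberosUtil;

public class TokenServerNameSketch {
  public static void main(String[] args) {
    // args[0]: base64 gssapi token copied from a Negotiate header (placeholder input)
    byte[] rawToken = Base64.getDecoder().decode(args[0]);
    // Handles both raw krb5 AP-REQ tokens and SPNEGO-wrapped tokens;
    // throws IllegalArgumentException if the token cannot be decoded.
    String serverPrincipal = KerberosUtil.getTokenServerName(rawToken);
    System.out.println("Requested service principal: " + serverPrincipal);
  }
}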
+ 2 - 0
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RolloverSignerSecretProvider.java

@@ -18,6 +18,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import javax.servlet.ServletContext;
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.slf4j.Logger;
@@ -38,6 +39,7 @@ import org.slf4j.LoggerFactory;
 public abstract class RolloverSignerSecretProvider
     extends SignerSecretProvider {
 
+  @VisibleForTesting
   static Logger LOG = LoggerFactory.getLogger(
     RolloverSignerSecretProvider.class);
   /**

+ 1 - 0
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java

@@ -1232,6 +1232,7 @@ public class TestAuthenticationFilter {
       String tokenSigned = signer.sign(token.toString());
       Cookie cookie = new Cookie(AuthenticatedURL.AUTH_COOKIE, tokenSigned);
       Mockito.when(request.getCookies()).thenReturn(new Cookie[]{cookie});
+      Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer());
 
       filter.doFilter(request, response, chain);
 

+ 1 - 1
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java

@@ -182,7 +182,7 @@ public class TestMultiSchemeAuthenticationHandler
     } catch (AuthenticationException ex) {
       // Expected
     } catch (Exception ex) {
-      Assert.fail();
+      Assert.fail("Wrong exception :"+ex);
     }
   }
 

+ 73 - 0
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java

@@ -16,11 +16,14 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Base64;
 import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;
@@ -178,4 +181,74 @@ public class TestKerberosUtil {
     keytab.addKeytabEntries(lstEntries);
     keytab.store(new File(testKeytab));
   }
+
+  @Test
+  public void testServicePrincipalDecode() throws Exception {
+    // test decoding standard krb5 tokens and spnego wrapped tokens
+    // for principals with the default realm, and a non-default realm.
+    String krb5Default =
+        "YIIB2AYJKoZIhvcSAQICAQBuggHHMIIBw6ADAgEFoQMCAQ6iBwMFACAAAACj" +
+        "gethgegwgeWgAwIBBaENGwtFWEFNUExFLkNPTaIcMBqgAwIBAKETMBEbBEhU" +
+        "VFAbCWxvY2FsaG9zdKOBsDCBraADAgERoQMCAQGigaAEgZ23QsT1+16T23ni" +
+        "JI1uFRU0FN13hhPSLAl4+oAqpV5s1Z6E+G2VKGx2+rUF21utOdlwUK/J5CKF" +
+        "HxM4zfNsmzRFhdk5moJW6AWHuRqGJ9hrZgTxA2vOBIn/tju+n/vJVEcUvW0f" +
+        "DiPfjPIPFOlc7V9GlWvZFyr5NMJSFwspKJXYh/FSNpSVTecfGskjded9TZzR" +
+        "2tOVzgpjFvAu/DETpIG/MIG8oAMCARGigbQEgbGWnbKlV1oo7/gzT4hi/Q41" +
+        "ff2luDnSxADEmo6M8LC42scsYMLNgU4iLJhuf4YLb7ueh790HrbB6Kdes71/" +
+        "gSBiLI2/mn3BqNE43gt94dQ8VFBix4nJCsYnuORYxLJjRSJE+3ImJNsSjqaf" +
+        "GRI0sp9w3hc4IVm8afb3Ggm6PgRIyyGNdTzK/p03v+zA01MJh3htuOgLKUOV" +
+        "z002pHnGzu/purZ5mOyaQT12vHxJ2T+Cwi8=";
+
+    String krb5Other =
+        "YIIB2AYJKoZIhvcSAQICAQBuggHHMIIBw6ADAgEFoQMCAQ6iBwMFACAAAACj" +
+        "gethgegwgeWgAwIBBaENGwtBQkNERUZHLk9SR6IcMBqgAwIBAKETMBEbBEhU" +
+        "VFAbCW90aGVyaG9zdKOBsDCBraADAgERoQMCAQGigaAEgZ23QsT1+16T23ni" +
+        "JI1uFRU0FN13hhPSLAl4+oAqpV5s1Z6E+G2VKGx2+rUF21utOdlwUK/J5CKF" +
+        "HxM4zfNsmzRFhdk5moJW6AWHuRqGJ9hrZgTxA2vOBIn/tju+n/vJVEcUvW0f" +
+        "DiPfjPIPFOlc7V9GlWvZFyr5NMJSFwspKJXYh/FSNpSVTecfGskjded9TZzR" +
+        "2tOVzgpjFvAu/DETpIG/MIG8oAMCARGigbQEgbGWnbKlV1oo7/gzT4hi/Q41" +
+        "ff2luDnSxADEmo6M8LC42scsYMLNgU4iLJhuf4YLb7ueh790HrbB6Kdes71/" +
+        "gSBiLI2/mn3BqNE43gt94dQ8VFBix4nJCsYnuORYxLJjRSJE+3ImJNsSjqaf" +
+        "GRI0sp9w3hc4IVm8afb3Ggm6PgRIyyGNdTzK/p03v+zA01MJh3htuOgLKUOV" +
+        "z002pHnGzu/purZ5mOyaQT12vHxJ2T+Cwi8K";
+
+    String spnegoDefault =
+        "YIICCQYGKwYBBQUCoIIB/TCCAfmgDTALBgkqhkiG9xIBAgKhBAMCAXaiggHg" +
+        "BIIB3GCCAdgGCSqGSIb3EgECAgEAboIBxzCCAcOgAwIBBaEDAgEOogcDBQAg" +
+        "AAAAo4HrYYHoMIHloAMCAQWhDRsLRVhBTVBMRS5DT02iHDAaoAMCAQChEzAR" +
+        "GwRIVFRQGwlsb2NhbGhvc3SjgbAwga2gAwIBEaEDAgEBooGgBIGdBWbzvV1R" +
+        "Iqb7WuPIW3RTkFtwjU9P/oFAbujGPd8h/qkCszroNdvHhUkPntuOqhFBntMo" +
+        "bilgTqNEdDUGvBbfkJaRklNGqT/IAOUV6tlGpBUCXquR5UdPzPpUvGZiVRUu" +
+        "FGH5DGGHvYF1CwXPp2l1Jq373vSLQ1kBl6TXl+aKLsZYhVUjKvE7Auippclb" +
+        "hv/GGGex/TcjNH48k47OQaSBvzCBvKADAgERooG0BIGxeChp3TMVtWbCdFGo" +
+        "YL+35r2762j+OEwZRfcj4xCK7j0mUTcxLtyVGxyY9Ax+ljl5gTwzRhXcJq0T" +
+        "TjiQwKJckeZ837mXQAURbfJpFc3VLAXGfNkMFCR7ZkWpGA1Vzc3PeUNczn2D" +
+        "Lpu8sme55HFFQDi/0akW6Lwv/iCrpwIkZPyZPjaEmwLVALu4E8m0Ka3fJkPV" +
+        "GAhamg9OQpuREIK0pCk3ZSHhJz8qMwduzRZHc4vN";
+
+    String spnegoOther =
+        "YIICCQYGKwYBBQUCoIIB/TCCAfmgDTALBgkqhkiG9xIBAgKhBAMCAXaiggHg" +
+        "BIIB3GCCAdgGCSqGSIb3EgECAgEAboIBxzCCAcOgAwIBBaEDAgEOogcDBQAg" +
+        "AAAAo4HrYYHoMIHloAMCAQWhDRsLQUJDREVGRy5PUkeiHDAaoAMCAQChEzAR" +
+        "GwRIVFRQGwlvdGhlcmhvc3SjgbAwga2gAwIBEaEDAgEBooGgBIGdBWbzvV1R" +
+        "Iqb7WuPIW3RTkFtwjU9P/oFAbujGPd8h/qkCszroNdvHhUkPntuOqhFBntMo" +
+        "bilgTqNEdDUGvBbfkJaRklNGqT/IAOUV6tlGpBUCXquR5UdPzPpUvGZiVRUu" +
+        "FGH5DGGHvYF1CwXPp2l1Jq373vSLQ1kBl6TXl+aKLsZYhVUjKvE7Auippclb" +
+        "hv/GGGex/TcjNH48k47OQaSBvzCBvKADAgERooG0BIGxeChp3TMVtWbCdFGo" +
+        "YL+35r2762j+OEwZRfcj4xCK7j0mUTcxLtyVGxyY9Ax+ljl5gTwzRhXcJq0T" +
+        "TjiQwKJckeZ837mXQAURbfJpFc3VLAXGfNkMFCR7ZkWpGA1Vzc3PeUNczn2D" +
+        "Lpu8sme55HFFQDi/0akW6Lwv/iCrpwIkZPyZPjaEmwLVALu4E8m0Ka3fJkPV" +
+        "GAhamg9OQpuREIK0pCk3ZSHhJz8qMwduzRZHc4vNCg==";
+
+
+    assertEquals("HTTP/localhost@EXAMPLE.COM", getPrincipal(krb5Default));
+    assertEquals("HTTP/otherhost@ABCDEFG.ORG", getPrincipal(krb5Other));
+    assertEquals("HTTP/localhost@EXAMPLE.COM", getPrincipal(spnegoDefault));
+    assertEquals("HTTP/otherhost@ABCDEFG.ORG", getPrincipal(spnegoOther));
+  }
+
+  private static String getPrincipal(String token) {
+    return KerberosUtil.getTokenServerName(
+        Base64.getDecoder().decode(token));
+  }
 }

+ 26 - 0
hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

@@ -416,4 +416,30 @@
     <Method name="toString"/>
     <Bug pattern="DM_DEFAULT_ENCODING"/>
   </Match>
+
+  <!-- We need to make the methods public because PBHelperClient calls them. -->
+  <Match>
+    <Class name="org.apache.hadoop.crypto.CipherSuite"/>
+    <Method name="setUnknownValue"/>
+    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.crypto.CryptoProtocolVersion"/>
+    <Method name="setUnknownValue"/>
+    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
+  </Match>
+
+  <!-- We need to make the method public for testing. -->
+  <Match>
+    <Class name="org.apache.hadoop.metrics2.lib.DefaultMetricsSystem"/>
+    <Method name="setMiniClusterMode"/>
+    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
+  </Match>
+
+  <!-- Experimental interface. Ignore. -->
+  <Match>
+    <Class name="org.apache.hadoop.metrics2.lib.DefaultMetricsFactory"/>
+    <Method name="setInstance"/>
+    <Bug pattern="ME_ENUM_FIELD_SETTER"/>
+  </Match>
 </FindBugsFilter>

+ 4 - 4
hadoop-common-project/hadoop-common/pom.xml

@@ -20,11 +20,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>3.0.0-alpha4-SNAPSHOT</version>
+    <version>3.0.0-beta1-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <artifactId>hadoop-common</artifactId>
-  <version>3.0.0-alpha4-SNAPSHOT</version>
+  <version>3.0.0-beta1-SNAPSHOT</version>
   <description>Apache Hadoop Common</description>
   <name>Apache Hadoop Common</name>
   <packaging>jar</packaging>
@@ -314,8 +314,8 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml</groupId>
-      <artifactId>aalto-xml</artifactId>
+      <groupId>com.fasterxml.woodstox</groupId>
+      <artifactId>woodstox-core</artifactId>
       <scope>compile</scope>
     </dependency>
     <dependency>

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.conf;
 
-import com.fasterxml.aalto.stax.InputFactoryImpl;
+import com.ctc.wstx.stax.WstxInputFactory;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.google.common.annotations.VisibleForTesting;
@@ -284,7 +284,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * Specify exact input factory to avoid time finding correct one.
    * Factory is reusable across un-synchronized threads once initialized
    */
-  private static final XMLInputFactory2 factory = new InputFactoryImpl();
+  private static final XMLInputFactory2 XML_INPUT_FACTORY = new WstxInputFactory();
 
   /**
    * Class to keep the information about the keys which replace the deprecated
@@ -2646,7 +2646,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     if (is == null) {
       return null;
     }
-    return factory.createXMLStreamReader(systemId, is);
+    return XML_INPUT_FACTORY.createXMLStreamReader(systemId, is);
   }
 
   private void loadResources(Properties properties,

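For context on the aalto-xml to woodstox-core swap above, a minimal sketch of the replacement factory used in isolation (assumes woodstox-core and its stax2-api dependency on the classpath; the XML snippet and class name are made up):

import com.ctc.wstx.stax.WstxInputFactory;
import java.io.StringReader;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamReader;
import org.codehaus.stax2.XMLInputFactory2;

public class WoodstoxSketch {
  public static void main(String[] args) throws Exception {
    // Same concrete factory that Configuration now instantiates.
    XMLInputFactory2 factory = new WstxInputFactory();
    XMLStreamReader reader = factory.createXMLStreamReader(
        new StringReader("<configuration><property><name>demo.key</name>"
            + "<value>1</value></property></configuration>"));
    while (reader.hasNext()) {
      if (reader.next() == XMLStreamConstants.START_ELEMENT
          && "name".equals(reader.getLocalName())) {
        System.out.println(reader.getElementText()); // prints demo.key
      }
    }
  }
}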
+ 6 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/AesCtrCryptoCodec.java

@@ -22,6 +22,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.base.Preconditions;
 
+import java.io.IOException;
+
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public abstract class AesCtrCryptoCodec extends CryptoCodec {
@@ -61,4 +63,8 @@ public abstract class AesCtrCryptoCodec extends CryptoCodec {
       IV[i] = (byte) sum;
     }
   }
+
+  @Override
+  public void close() throws IOException {
+  }
 }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto;
 
+import java.io.Closeable;
 import java.security.GeneralSecurityException;
 import java.util.List;
 
@@ -42,7 +43,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
-public abstract class CryptoCodec implements Configurable {
+public abstract class CryptoCodec implements Configurable, Closeable {
   public static Logger LOG = LoggerFactory.getLogger(CryptoCodec.class);
   
   /**

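Since CryptoCodec now implements Closeable, callers are expected to release it explicitly. A minimal sketch of the intended pattern, mirroring the try/finally blocks added to KeyProviderCryptoExtension further down (illustrative only; the wrapper class and method name are made up):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoCodec;

public class CodecCloseSketch {
  static byte[] randomIv(Configuration conf) throws IOException {
    CryptoCodec cc = CryptoCodec.getInstance(conf);
    try {
      byte[] iv = new byte[cc.getCipherSuite().getAlgorithmBlockSize()];
      cc.generateSecureRandom(iv);
      return iv;
    } finally {
      cc.close(); // e.g. lets OpensslAesCtrCryptoCodec close its OsSecureRandom stream
    }
  }
}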
+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java

@@ -315,6 +315,7 @@ public class CryptoInputStream extends FilterInputStream implements
     
     super.close();
     freeBuffers();
+    codec.close();
     closed = true;
   }
   

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java

@@ -239,6 +239,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
       flush();
       if (closeOutputStream) {
         super.close();
+        codec.close();
       }
       freeBuffers();
     } finally {

+ 11 - 11
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java

@@ -71,16 +71,6 @@ public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec {
     }
   }
 
-  @Override
-  protected void finalize() throws Throwable {
-    try {
-      Closeable r = (Closeable) this.random;
-      r.close();
-    } catch (ClassCastException e) {
-    }
-    super.finalize();
-  }
-
   @Override
   public Configuration getConf() {
     return conf;
@@ -100,7 +90,17 @@ public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec {
   public void generateSecureRandom(byte[] bytes) {
     random.nextBytes(bytes);
   }
-  
+
+  @Override
+  public void close() throws IOException {
+    try {
+      Closeable r = (Closeable) this.random;
+      r.close();
+    } catch (ClassCastException e) {
+    }
+    super.close();
+  }
+
   private static class OpensslAesCtrCipher implements Encryptor, Decryptor {
     private final OpensslCipher cipher;
     private final int mode;

+ 18 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java

@@ -274,12 +274,16 @@ public class KeyProviderCryptoExtension extends
       // Generate random bytes for new key and IV
 
       CryptoCodec cc = CryptoCodec.getInstance(keyProvider.getConf());
-      final byte[] newKey = new byte[encryptionKey.getMaterial().length];
-      cc.generateSecureRandom(newKey);
-      final byte[] iv = new byte[cc.getCipherSuite().getAlgorithmBlockSize()];
-      cc.generateSecureRandom(iv);
-      Encryptor encryptor = cc.createEncryptor();
-      return generateEncryptedKey(encryptor, encryptionKey, newKey, iv);
+      try {
+        final byte[] newKey = new byte[encryptionKey.getMaterial().length];
+        cc.generateSecureRandom(newKey);
+        final byte[] iv = new byte[cc.getCipherSuite().getAlgorithmBlockSize()];
+        cc.generateSecureRandom(iv);
+        Encryptor encryptor = cc.createEncryptor();
+        return generateEncryptedKey(encryptor, encryptionKey, newKey, iv);
+      } finally {
+        cc.close();
+      }
     }
 
     private EncryptedKeyVersion generateEncryptedKey(final Encryptor encryptor,
@@ -322,9 +326,13 @@ public class KeyProviderCryptoExtension extends
 
       final KeyVersion dek = decryptEncryptedKey(ekv);
       final CryptoCodec cc = CryptoCodec.getInstance(keyProvider.getConf());
-      final Encryptor encryptor = cc.createEncryptor();
-      return generateEncryptedKey(encryptor, ekNow, dek.getMaterial(),
-          ekv.getEncryptedKeyIv());
+      try {
+        final Encryptor encryptor = cc.createEncryptor();
+        return generateEncryptedKey(encryptor, ekNow, dek.getMaterial(),
+            ekv.getEncryptedKeyIv());
+      } finally {
+        cc.close();
+      }
     }
 
     @Override
@@ -364,6 +372,7 @@ public class KeyProviderCryptoExtension extends
       bbOut.flip();
       byte[] decryptedKey = new byte[keyLen];
       bbOut.get(decryptedKey);
+      cc.close();
       return new KeyVersion(encryptionKey.getName(), EK, decryptedKey);
     }
 

+ 10 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java

@@ -590,6 +590,10 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
       // failure. Unfortunately, the AuthenticationFilter returns 403 when it
       // cannot authenticate (Since a 401 requires Server to send
       // WWW-Authenticate header as well)..
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Response={}({}), resetting authToken",
+            conn.getResponseCode(), conn.getResponseMessage());
+      }
       KMSClientProvider.this.authToken =
           new DelegationTokenAuthenticatedURL.Token();
       if (authRetryCount > 0) {
@@ -604,6 +608,10 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
     }
     try {
       AuthenticatedURL.extractToken(conn, authToken);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Extracted token, authToken={}, its dt={}", authToken,
+            authToken.getDelegationToken());
+      }
     } catch (AuthenticationException e) {
       // Ignore the AuthExceptions.. since we are just using the method to
       // extract and set the authToken.. (Workaround till we actually fix
@@ -1055,11 +1063,13 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
           public Token<?> run() throws Exception {
             // Not using the cached token here.. Creating a new token here
             // everytime.
+            LOG.debug("Getting new token from {}, renewer:{}", url, renewer);
             return authUrl.getDelegationToken(url,
                 new DelegationTokenAuthenticatedURL.Token(), renewer, doAsUser);
           }
         });
         if (token != null) {
+          LOG.debug("New token received: ({})", token);
           credentials.addToken(token.getService(), token);
           tokens = new Token<?>[] { token };
         } else {

+ 5 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java

@@ -116,4 +116,9 @@ public class OsSecureRandom extends Random implements Closeable, Configurable {
       stream = null;
     }
   }
+
+  @Override
+  protected void finalize() throws Throwable {
+    close();
+  }
 }

+ 15 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java

@@ -39,6 +39,7 @@ public class ContentSummary extends QuotaUsage implements Writable{
   private long snapshotFileCount;
   private long snapshotDirectoryCount;
   private long snapshotSpaceConsumed;
+  private String erasureCodingPolicy;
 
   /** We don't use generics. Instead override spaceConsumed and other methods
       in order to keep backward compatibility. */
@@ -81,6 +82,11 @@ public class ContentSummary extends QuotaUsage implements Writable{
       return this;
     }
 
+    public Builder erasureCodingPolicy(String ecPolicy) {
+      this.erasureCodingPolicy = ecPolicy;
+      return this;
+    }
+
     @Override
     public Builder quota(long quota){
       super.quota(quota);
@@ -136,6 +142,7 @@ public class ContentSummary extends QuotaUsage implements Writable{
     private long snapshotFileCount;
     private long snapshotDirectoryCount;
     private long snapshotSpaceConsumed;
+    private String erasureCodingPolicy;
   }
 
   /** Constructor deprecated by ContentSummary.Builder*/
@@ -175,6 +182,7 @@ public class ContentSummary extends QuotaUsage implements Writable{
     this.snapshotFileCount = builder.snapshotFileCount;
     this.snapshotDirectoryCount = builder.snapshotDirectoryCount;
     this.snapshotSpaceConsumed = builder.snapshotSpaceConsumed;
+    this.erasureCodingPolicy = builder.erasureCodingPolicy;
   }
 
   /** @return the length */
@@ -202,6 +210,10 @@ public class ContentSummary extends QuotaUsage implements Writable{
     return snapshotSpaceConsumed;
   }
 
+  public String getErasureCodingPolicy() {
+    return erasureCodingPolicy;
+  }
+
   @Override
   @InterfaceAudience.Private
   public void write(DataOutput out) throws IOException {
@@ -237,6 +249,7 @@ public class ContentSummary extends QuotaUsage implements Writable{
           getSnapshotFileCount() == right.getSnapshotFileCount() &&
           getSnapshotDirectoryCount() == right.getSnapshotDirectoryCount() &&
           getSnapshotSpaceConsumed() == right.getSnapshotSpaceConsumed() &&
+          getErasureCodingPolicy().equals(right.getErasureCodingPolicy()) &&
           super.equals(to);
     } else {
       return super.equals(to);
@@ -247,7 +260,8 @@ public class ContentSummary extends QuotaUsage implements Writable{
   public int hashCode() {
     long result = getLength() ^ getFileCount() ^ getDirectoryCount()
         ^ getSnapshotLength() ^ getSnapshotFileCount()
-        ^ getSnapshotDirectoryCount() ^ getSnapshotSpaceConsumed();
+        ^ getSnapshotDirectoryCount() ^ getSnapshotSpaceConsumed()
+        ^ getErasureCodingPolicy().hashCode();
     return ((int) result) ^ super.hashCode();
   }
 

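A small sketch of the new erasureCodingPolicy field on ContentSummary.Builder (illustrative only; the other Builder setters and the policy name below are assumptions, and note that Count further down compares against the literal "Replicated" for non-EC paths):

import org.apache.hadoop.fs.ContentSummary;

public class EcSummarySketch {
  public static void main(String[] args) {
    ContentSummary summary = new ContentSummary.Builder()
        .length(1024L)
        .fileCount(1L)
        .directoryCount(0L)
        .erasureCodingPolicy("RS-6-3-64k")  // placeholder policy name
        .build();
    System.out.println(summary.getErasureCodingPolicy());
  }
}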
+ 125 - 46
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java

@@ -18,36 +18,70 @@
 package org.apache.hadoop.fs;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 
+import javax.annotation.Nonnull;
 import java.io.IOException;
 import java.util.EnumSet;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
 
-/** Base of specific file system FSDataOutputStreamBuilder. */
+/**
+ * Builder for {@link FSDataOutputStream} and its subclasses.
+ *
+ * It is used to create {@link FSDataOutputStream} when creating a new file or
+ * appending an existing file on {@link FileSystem}.
+ *
+ * By default, it does not create parent directory that do not exist.
+ * {@link FileSystem#createNonRecursive(Path, boolean, int, short, long,
+ * Progressable)}.
+ *
+ * To create missing parent directory, use {@link #recursive()}.
+ */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
-public class FSDataOutputStreamBuilder {
-  private Path path = null;
+public abstract class FSDataOutputStreamBuilder
+    <S extends FSDataOutputStream, B extends FSDataOutputStreamBuilder<S, B>> {
+  private final FileSystem fs;
+  private final Path path;
   private FsPermission permission = null;
-  private Integer bufferSize;
-  private Short replication;
-  private Long blockSize;
+  private int bufferSize;
+  private short replication;
+  private long blockSize;
+  /** set to true to create missing directory. */
+  private boolean recursive = false;
+  private final EnumSet<CreateFlag> flags = EnumSet.noneOf(CreateFlag.class);
   private Progressable progress = null;
-  private EnumSet<CreateFlag> flags = null;
   private ChecksumOpt checksumOpt = null;
 
-  private final FileSystem fs;
-
-  protected FSDataOutputStreamBuilder(FileSystem fileSystem, Path p) {
+  /**
+   * Return the concrete implementation of the builder instance.
+   */
+  protected abstract B getThisBuilder();
+
+  /**
+   * Constructor.
+   */
+  protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
+      @Nonnull Path p) {
+    Preconditions.checkNotNull(fileSystem);
+    Preconditions.checkNotNull(p);
     fs = fileSystem;
     path = p;
+    bufferSize = fs.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
+        IO_FILE_BUFFER_SIZE_DEFAULT);
+    replication = fs.getDefaultReplication(path);
+    blockSize = fs.getDefaultBlockSize(p);
+  }
+
+  protected FileSystem getFS() {
+    return fs;
   }
 
   protected Path getPath() {
@@ -56,91 +90,136 @@ public class FSDataOutputStreamBuilder {
 
   protected FsPermission getPermission() {
     if (permission == null) {
-      return FsPermission.getFileDefault();
+      permission = FsPermission.getFileDefault();
     }
     return permission;
   }
 
-  public FSDataOutputStreamBuilder setPermission(final FsPermission perm) {
+  /**
+   * Set permission for the file.
+   */
+  public B permission(@Nonnull final FsPermission perm) {
     Preconditions.checkNotNull(perm);
     permission = perm;
-    return this;
+    return getThisBuilder();
   }
 
   protected int getBufferSize() {
-    if (bufferSize == null) {
-      return fs.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
-          IO_FILE_BUFFER_SIZE_DEFAULT);
-    }
     return bufferSize;
   }
 
-  public FSDataOutputStreamBuilder setBufferSize(int bufSize) {
+  /**
+   * Set the size of the buffer to be used.
+   */
+  public B bufferSize(int bufSize) {
     bufferSize = bufSize;
-    return this;
+    return getThisBuilder();
   }
 
   protected short getReplication() {
-    if (replication == null) {
-      return fs.getDefaultReplication(getPath());
-    }
     return replication;
   }
 
-  public FSDataOutputStreamBuilder setReplication(short replica) {
+  /**
+   * Set replication factor.
+   */
+  public B replication(short replica) {
     replication = replica;
-    return this;
+    return getThisBuilder();
   }
 
   protected long getBlockSize() {
-    if (blockSize == null) {
-      return fs.getDefaultBlockSize(getPath());
-    }
     return blockSize;
   }
 
-  public FSDataOutputStreamBuilder setBlockSize(long blkSize) {
+  /**
+   * Set block size.
+   */
+  public B blockSize(long blkSize) {
     blockSize = blkSize;
-    return this;
+    return getThisBuilder();
+  }
+
+  /**
+   * Return true to create the parent directories if they do not exist.
+   */
+  protected boolean isRecursive() {
+    return recursive;
+  }
+
+  /**
+   * Create the parent directory if they do not exist.
+   */
+  public B recursive() {
+    recursive = true;
+    return getThisBuilder();
   }
 
 
   protected Progressable getProgress() {
     return progress;
   }
 
 
-  public FSDataOutputStreamBuilder setProgress(final Progressable prog) {
+  /**
+   * Set the facility of reporting progress.
+   */
+  public B progress(@Nonnull final Progressable prog) {
     Preconditions.checkNotNull(prog);
     progress = prog;
-    return this;
+    return getThisBuilder();
   }
 
 
   protected EnumSet<CreateFlag> getFlags() {
-    if (flags == null) {
-      return EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE);
-    }
     return flags;
   }
 
 
-  public FSDataOutputStreamBuilder setFlags(
-      final EnumSet<CreateFlag> enumFlags) {
-    Preconditions.checkNotNull(enumFlags);
-    flags = enumFlags;
-    return this;
+  /**
+   * Create an FSDataOutputStream at the specified path.
+   */
+  public B create() {
+    flags.add(CreateFlag.CREATE);
+    return getThisBuilder();
+  }
+
+  /**
+   * Set to true to overwrite the existing file.
+   * Set it to false, an exception will be thrown when calling {@link #build()}
+   * if the file exists.
+   */
+  public B overwrite(boolean overwrite) {
+    if (overwrite) {
+      flags.add(CreateFlag.OVERWRITE);
+    } else {
+      flags.remove(CreateFlag.OVERWRITE);
+    }
+    return getThisBuilder();
+  }
+
+  /**
+   * Append to an existing file (optional operation).
+   */
+  public B append() {
+    flags.add(CreateFlag.APPEND);
+    return getThisBuilder();
   }
 
 
   protected ChecksumOpt getChecksumOpt() {
     return checksumOpt;
   }
 
 
-  public FSDataOutputStreamBuilder setChecksumOpt(
-      final ChecksumOpt chksumOpt) {
+  /**
+   * Set checksum opt.
+   */
+  public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
     Preconditions.checkNotNull(chksumOpt);
     checksumOpt = chksumOpt;
-    return this;
+    return getThisBuilder();
   }
 
 
-  public FSDataOutputStream build() throws IOException {
-    return fs.create(getPath(), getPermission(), getFlags(), getBufferSize(),
-        getReplication(), getBlockSize(), getProgress(), getChecksumOpt());
-  }
+  /**
+   * Create the FSDataOutputStream to write on the file system.
+   *
+   * @throws HadoopIllegalArgumentException if the parameters are not valid.
+   * @throws IOException on errors when file system creates or appends the file.
+   */
+  public abstract S build() throws IOException;
 }

+ 39 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java

@@ -4140,8 +4140,34 @@ public abstract class FileSystem extends Configured implements Closeable {
     return GlobalStorageStatistics.INSTANCE;
   }
 
 
+  private static final class FileSystemDataOutputStreamBuilder extends
+      FSDataOutputStreamBuilder<FSDataOutputStream,
+        FileSystemDataOutputStreamBuilder> {
+
+    /**
+     * Constructor.
+     */
+    protected FileSystemDataOutputStreamBuilder(FileSystem fileSystem, Path p) {
+      super(fileSystem, p);
+    }
+
+    @Override
+    public FSDataOutputStream build() throws IOException {
+      return getFS().create(getPath(), getPermission(), getFlags(),
+          getBufferSize(), getReplication(), getBlockSize(), getProgress(),
+          getChecksumOpt());
+    }
+
+    @Override
+    protected FileSystemDataOutputStreamBuilder getThisBuilder() {
+      return this;
+    }
+  }
+
   /**
    * Create a new FSDataOutputStreamBuilder for the file with path.
+   * Files are overwritten by default.
+   *
    * @param path file path
    * @return a FSDataOutputStreamBuilder object to build the file
    *
@@ -4149,7 +4175,18 @@ public abstract class FileSystem extends Configured implements Closeable {
    * builder interface becomes stable.
    */
   @InterfaceAudience.Private
-  protected FSDataOutputStreamBuilder newFSDataOutputStreamBuilder(Path path) {
-    return new FSDataOutputStreamBuilder(this, path);
+  protected FSDataOutputStreamBuilder createFile(Path path) {
+    return new FileSystemDataOutputStreamBuilder(this, path)
+        .create().overwrite(true);
+  }
+
+  /**
+   * Create a Builder to append a file.
+   * @param path file path.
+   * @return a {@link FSDataOutputStreamBuilder} to build file append request.
+   */
+  @InterfaceAudience.Private
+  protected FSDataOutputStreamBuilder appendFile(Path path) {
+    return new FileSystemDataOutputStreamBuilder(this, path).append();
   }
 }

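A sketch of how the fluent create/append API added here is meant to be used. Note that in this commit createFile()/appendFile() are still protected and @InterfaceAudience.Private, so the call below assumes a caller inside the package or a later release where they are exposed; the class name and path are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateFileBuilderSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path file = new Path("/tmp/builder-demo.txt");  // placeholder path
    // createFile() pre-sets CREATE + OVERWRITE; recursive() additionally
    // creates missing parent directories before the stream is built.
    try (FSDataOutputStream out = fs.createFile(file)
        .replication((short) 1)
        .recursive()
        .build()) {
      out.writeUTF("hello");
    }
  }
}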
+ 4 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -126,8 +126,10 @@ public class FileUtil {
     file.deleteOnExit();
     if (file.isDirectory()) {
       File[] files = file.listFiles();
-      for (File child : files) {
-        fullyDeleteOnExit(child);
+      if (files != null) {
+        for (File child : files) {
+          fullyDeleteOnExit(child);
+        }
       }
     }
   }

+ 7 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java

@@ -667,7 +667,12 @@ public class FilterFileSystem extends FileSystem {
   }
 
 
   @Override
-  protected FSDataOutputStreamBuilder newFSDataOutputStreamBuilder(Path path) {
-    return fs.newFSDataOutputStreamBuilder(path);
+  public FSDataOutputStreamBuilder createFile(Path path) {
+    return fs.createFile(path);
+  }
+
+  @Override
+  public FSDataOutputStreamBuilder appendFile(Path path) {
+    return fs.appendFile(path);
   }
 }

+ 7 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java

@@ -1270,7 +1270,12 @@ public class HarFileSystem extends FileSystem {
   }
 
 
   @Override
-  public FSDataOutputStreamBuilder newFSDataOutputStreamBuilder(Path path) {
-    return fs.newFSDataOutputStreamBuilder(path);
+  public FSDataOutputStreamBuilder createFile(Path path) {
+    return fs.createFile(path);
+  }
+
+  @Override
+  public FSDataOutputStreamBuilder appendFile(Path path) {
+    return fs.appendFile(path);
   }
 }

+ 10 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -384,13 +384,16 @@ public class RawLocalFileSystem extends FileSystem {
     // again.
     try {
       FileStatus sdst = this.getFileStatus(dst);
-      if (sdst.isDirectory() && dstFile.list().length == 0) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("Deleting empty destination and renaming " + src + " to " +
-              dst);
-        }
-        if (this.delete(dst, false) && srcFile.renameTo(dstFile)) {
-          return true;
+      String[] dstFileList = dstFile.list();
+      if (dstFileList != null) {
+        if (sdst.isDirectory() && dstFileList.length == 0) {
+          if (LOG.isDebugEnabled()) {
+            LOG.debug("Deleting empty destination and renaming " + src +
+                " to " + dst);
+          }
+          if (this.delete(dst, false) && srcFile.renameTo(dstFile)) {
+            return true;
+          }
         }
       }
     } catch (FileNotFoundException ignored) {

+ 5 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java

@@ -415,16 +415,17 @@ public class FTPFileSystem extends FileSystem {
     return client.removeDirectory(pathName);
   }
 
 
-  private FsAction getFsAction(int accessGroup, FTPFile ftpFile) {
+  @VisibleForTesting
+  FsAction getFsAction(int accessGroup, FTPFile ftpFile) {
     FsAction action = FsAction.NONE;
     if (ftpFile.hasPermission(accessGroup, FTPFile.READ_PERMISSION)) {
-      action.or(FsAction.READ);
+      action = action.or(FsAction.READ);
     }
     if (ftpFile.hasPermission(accessGroup, FTPFile.WRITE_PERMISSION)) {
-      action.or(FsAction.WRITE);
+      action = action.or(FsAction.WRITE);
     }
     if (ftpFile.hasPermission(accessGroup, FTPFile.EXECUTE_PERMISSION)) {
-      action.or(FsAction.EXECUTE);
+      action = action.or(FsAction.EXECUTE);
     }
     return action;
   }

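The getFsAction() fix above works because FsAction is an immutable enum: or() returns a new value instead of mutating the receiver, so the result has to be reassigned. A tiny sketch of the difference (illustrative only; the class name is made up):

import org.apache.hadoop.fs.permission.FsAction;

public class FsActionOrSketch {
  public static void main(String[] args) {
    FsAction action = FsAction.NONE;
    action.or(FsAction.READ);            // result dropped: action is still NONE (the old bug)
    action = action.or(FsAction.READ);   // the fixed pattern: reassign the returned value
    System.out.println(action);          // READ
  }
}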
+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java

@@ -501,7 +501,7 @@ abstract class CommandWithDestination extends FsCommand {
                         createFlags,
                         getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
                             IO_FILE_BUFFER_SIZE_DEFAULT),
-                        lazyPersist ? 1 : getDefaultReplication(item.path),
+                        (short) 1,
                         getDefaultBlockSize(),
                         null,
                         null);

+ 32 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java

@@ -55,12 +55,14 @@ public class Count extends FsCommand {
   private static final String OPTION_EXCLUDE_SNAPSHOT = "x";
   //return the quota, namespace count and disk space usage.
   private static final String OPTION_QUOTA_AND_USAGE = "u";
+  private static final String OPTION_ECPOLICY = "e";
 
 
   public static final String NAME = "count";
   public static final String USAGE =
       "[-" + OPTION_QUOTA + "] [-" + OPTION_HUMAN + "] [-" + OPTION_HEADER
       "[-" + OPTION_QUOTA + "] [-" + OPTION_HUMAN + "] [-" + OPTION_HEADER
           + "] [-" + OPTION_TYPE + " [<storage type>]] [-" +
           OPTION_QUOTA_AND_USAGE + "] [-" + OPTION_EXCLUDE_SNAPSHOT
+          + "] [-" + OPTION_ECPOLICY
           + "] <path> ...";
   public static final String DESCRIPTION =
       "Count the number of directories, files and bytes under the paths\n" +
       "Count the number of directories, files and bytes under the paths\n" +
@@ -90,7 +92,8 @@ public class Count extends FsCommand {
           "It can also pass the value '', 'all' or 'ALL' to specify all " +
           "It can also pass the value '', 'all' or 'ALL' to specify all " +
           "the storage types.\n" +
           "the storage types.\n" +
           "The -" + OPTION_QUOTA_AND_USAGE + " option shows the quota and \n" +
           "The -" + OPTION_QUOTA_AND_USAGE + " option shows the quota and \n" +
-          "the usage against the quota without the detailed content summary.";
+          "the usage against the quota without the detailed content summary."+
+          "The -"+ OPTION_ECPOLICY +" option shows the erasure coding policy.";
 
 
   private boolean showQuotas;
   private boolean humanReadable;
@@ -98,6 +101,7 @@ public class Count extends FsCommand {
   private List<StorageType> storageTypes = null;
   private boolean showQuotasAndUsageOnly;
   private boolean excludeSnapshots;
+  private boolean displayECPolicy;
 
 
   /** Constructor */
   public Count() {}
@@ -118,7 +122,8 @@ public class Count extends FsCommand {
   protected void processOptions(LinkedList<String> args) {
     CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE,
         OPTION_QUOTA, OPTION_HUMAN, OPTION_HEADER, OPTION_QUOTA_AND_USAGE,
-        OPTION_EXCLUDE_SNAPSHOT);
+        OPTION_EXCLUDE_SNAPSHOT,
+        OPTION_ECPOLICY);
     cf.addOptionWithValue(OPTION_TYPE);
     cf.parse(args);
     if (args.isEmpty()) { // default path is the current working directory
@@ -128,6 +133,7 @@ public class Count extends FsCommand {
     humanReadable = cf.getOpt(OPTION_HUMAN);
     showQuotasAndUsageOnly = cf.getOpt(OPTION_QUOTA_AND_USAGE);
     excludeSnapshots = cf.getOpt(OPTION_EXCLUDE_SNAPSHOT);
+    displayECPolicy = cf.getOpt(OPTION_ECPOLICY);
 
 
     if (showQuotas || showQuotasAndUsageOnly) {
       String types = cf.getOptValue(OPTION_TYPE);
@@ -146,15 +152,21 @@ public class Count extends FsCommand {
     }
 
 
     if (cf.getOpt(OPTION_HEADER)) {
+      StringBuilder headString = new StringBuilder();
       if (showQuotabyType) {
-        out.println(QuotaUsage.getStorageTypeHeader(storageTypes) + "PATHNAME");
+        headString.append(QuotaUsage.getStorageTypeHeader(storageTypes));
       } else {
       } else {
         if (showQuotasAndUsageOnly) {
         if (showQuotasAndUsageOnly) {
-          out.println(QuotaUsage.getHeader() + "PATHNAME");
+          headString.append(QuotaUsage.getHeader());
         } else {
         } else {
-          out.println(ContentSummary.getHeader(showQuotas) + "PATHNAME");
+          headString.append(ContentSummary.getHeader(showQuotas));
         }
         }
       }
+      if(displayECPolicy){
+        headString.append("ERASURECODING_POLICY ");
+      }
+      headString.append("PATHNAME");
+      out.println(headString.toString());
     }
   }
 
 
@@ -175,15 +187,26 @@ public class Count extends FsCommand {
 
 
   @Override
   protected void processPath(PathData src) throws IOException {
+    StringBuilder outputString = new StringBuilder();
     if (showQuotasAndUsageOnly || showQuotabyType) {
       QuotaUsage usage = src.fs.getQuotaUsage(src.path);
-      out.println(usage.toString(isHumanReadable(), showQuotabyType,
-          storageTypes) + src);
+      outputString.append(usage.toString(
+          isHumanReadable(), showQuotabyType, storageTypes));
     } else {
       ContentSummary summary = src.fs.getContentSummary(src.path);
-      out.println(summary.
-          toString(showQuotas, isHumanReadable(), excludeSnapshots) + src);
+      outputString.append(summary.toString(
+          showQuotas, isHumanReadable(), excludeSnapshots));
+    }
+    if(displayECPolicy){
+      ContentSummary summary = src.fs.getContentSummary(src.path);
+      if(!summary.getErasureCodingPolicy().equals("Replicated")){
+        outputString.append("EC:");
+      }
+      outputString.append(summary.getErasureCodingPolicy());
+      outputString.append(" ");
     }
     }
+    outputString.append(src);
+    out.println(outputString.toString());
   }
   }
 
 
   /**
   /**

+ 47 - 16
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.ContentSummary;
 
 
 /**
 /**
  * Get a listing of all files in that match the file patterns.
  * Get a listing of all files in that match the file patterns.
@@ -54,13 +55,14 @@ class Ls extends FsCommand {
   private static final String OPTION_MTIME = "t";
   private static final String OPTION_MTIME = "t";
   private static final String OPTION_ATIME = "u";
   private static final String OPTION_ATIME = "u";
   private static final String OPTION_SIZE = "S";
   private static final String OPTION_SIZE = "S";
+  private static final String OPTION_ECPOLICY = "e";
 
 
   public static final String NAME = "ls";
   public static final String NAME = "ls";
   public static final String USAGE = "[-" + OPTION_PATHONLY + "] [-" +
   public static final String USAGE = "[-" + OPTION_PATHONLY + "] [-" +
       OPTION_DIRECTORY + "] [-" + OPTION_HUMAN + "] [-" +
       OPTION_DIRECTORY + "] [-" + OPTION_HUMAN + "] [-" +
       OPTION_HIDENONPRINTABLE + "] [-" + OPTION_RECURSIVE + "] [-" +
       OPTION_HIDENONPRINTABLE + "] [-" + OPTION_RECURSIVE + "] [-" +
       OPTION_MTIME + "] [-" + OPTION_SIZE + "] [-" + OPTION_REVERSE + "] [-" +
       OPTION_MTIME + "] [-" + OPTION_SIZE + "] [-" + OPTION_REVERSE + "] [-" +
-      OPTION_ATIME + "] [<path> ...]";
+      OPTION_ATIME + "] [-" + OPTION_ECPOLICY +"] [<path> ...]";
 
 
   public static final String DESCRIPTION =
   public static final String DESCRIPTION =
       "List the contents that match the specified file pattern. If " +
       "List the contents that match the specified file pattern. If " +
@@ -91,7 +93,9 @@ class Ls extends FsCommand {
           "  Reverse the order of the sort.\n" +
           "  Reverse the order of the sort.\n" +
           "  -" + OPTION_ATIME +
           "  -" + OPTION_ATIME +
           "  Use time of last access instead of modification for\n" +
           "  Use time of last access instead of modification for\n" +
-          "      display and sorting.";
+          "      display and sorting.\n"+
+          "  -" + OPTION_ECPOLICY +
+          "  Display the erasure coding policy of files and directories.\n";
 
 
   protected final SimpleDateFormat dateFormat =
   protected final SimpleDateFormat dateFormat =
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
@@ -104,6 +108,7 @@ class Ls extends FsCommand {
   private boolean orderTime;
   private boolean orderTime;
   private boolean orderSize;
   private boolean orderSize;
   private boolean useAtime;
   private boolean useAtime;
+  private boolean displayECPolicy;
   private Comparator<PathData> orderComparator;
   private Comparator<PathData> orderComparator;
 
 
   protected boolean humanReadable = false;
   protected boolean humanReadable = false;
@@ -129,7 +134,7 @@ class Ls extends FsCommand {
     CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE,
         OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN,
         OPTION_HIDENONPRINTABLE, OPTION_RECURSIVE, OPTION_REVERSE,
-        OPTION_MTIME, OPTION_SIZE, OPTION_ATIME);
+        OPTION_MTIME, OPTION_SIZE, OPTION_ATIME, OPTION_ECPOLICY);
     cf.parse(args);
     pathOnly = cf.getOpt(OPTION_PATHONLY);
     dirRecurse = !cf.getOpt(OPTION_DIRECTORY);
@@ -140,6 +145,7 @@ class Ls extends FsCommand {
     orderTime = cf.getOpt(OPTION_MTIME);
     orderSize = !orderTime && cf.getOpt(OPTION_SIZE);
     useAtime = cf.getOpt(OPTION_ATIME);
+    displayECPolicy = cf.getOpt(OPTION_ECPOLICY);
     if (args.isEmpty()) args.add(Path.CUR_DIR);
 
     initialiseOrderComparator();
@@ -245,25 +251,42 @@ class Ls extends FsCommand {
       return;
     }
     FileStatus stat = item.stat;
-    String line = String.format(lineFormat,
-        (stat.isDirectory() ? "d" : "-"),
-        stat.getPermission() + (stat.getPermission().getAclBit() ? "+" : " "),
-        (stat.isFile() ? stat.getReplication() : "-"),
-        stat.getOwner(),
-        stat.getGroup(),
-        formatSize(stat.getLen()),
-        dateFormat.format(new Date(isUseAtime()
-            ? stat.getAccessTime()
-            : stat.getModificationTime())),
-        isHideNonPrintable() ? new PrintableString(item.toString()) : item);
-    out.println(line);
+    if (displayECPolicy) {
+      ContentSummary contentSummary = item.fs.getContentSummary(item.path);
+      String line = String.format(lineFormat,
+          (stat.isDirectory() ? "d" : "-"),
+          stat.getPermission() + (stat.getPermission().getAclBit() ? "+" : " "),
+          (stat.isFile() ? stat.getReplication() : "-"),
+          stat.getOwner(),
+          stat.getGroup(),
+          contentSummary.getErasureCodingPolicy(),
+          formatSize(stat.getLen()),
+          dateFormat.format(new Date(isUseAtime()
+              ? stat.getAccessTime()
+              : stat.getModificationTime())),
+          isHideNonPrintable() ? new PrintableString(item.toString()) : item);
+      out.println(line);
+    } else {
+      String line = String.format(lineFormat,
+          (stat.isDirectory() ? "d" : "-"),
+          stat.getPermission() + (stat.getPermission().getAclBit() ? "+" : " "),
+          (stat.isFile() ? stat.getReplication() : "-"),
+          stat.getOwner(),
+          stat.getGroup(),
+          formatSize(stat.getLen()),
+          dateFormat.format(new Date(isUseAtime()
+              ? stat.getAccessTime()
+              : stat.getModificationTime())),
+          isHideNonPrintable() ? new PrintableString(item.toString()) : item);
+      out.println(line);
+    }
   }
 
   /**
    * Compute column widths and rebuild the format string
    * @param items to find the max field width for each column
    */
-  private void adjustColumnWidths(PathData items[]) {
+  private void adjustColumnWidths(PathData items[]) throws IOException {
     for (PathData item : items) {
       FileStatus stat = item.stat;
       maxRepl  = maxLength(maxRepl, stat.getReplication());
@@ -278,6 +301,14 @@ class Ls extends FsCommand {
     // Do not use '%-0s' as a formatting conversion, since it will throw a
     // a MissingFormatWidthException if it is used in String.format().
     // http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html#intFlags
+    if(displayECPolicy){
+      int maxEC=0;
+      for (PathData item : items) {
+          ContentSummary contentSummary = item.fs.getContentSummary(item.path);
+          maxEC=maxLength(maxEC,contentSummary.getErasureCodingPolicy().length());
+      }
+      fmt.append(" %"+maxEC+"s ");
+    }
     fmt.append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s");
     fmt.append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
     fmt.append("%"  + maxLen   + "s ");

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

@@ -1080,7 +1080,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
         List<ACL> acl = zkClient.getACL(path, stat);
         if (acl == null || !acl.containsAll(zkAcl) ||
             !zkAcl.containsAll(acl)) {
-          zkClient.setACL(path, zkAcl, stat.getVersion());
+          zkClient.setACL(path, zkAcl, stat.getAversion());
         }
         return null;
       }
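
ZooKeeper versions data and ACLs separately, and setACL performs its compare-and-set against the ACL version, which is why getAversion() is the right argument above. A hedged stand-alone sketch with a raw ZooKeeper client (the helper class and connected handle are assumptions, not code from this patch):

    import java.util.List;
    import org.apache.zookeeper.ZooKeeper;
    import org.apache.zookeeper.data.ACL;
    import org.apache.zookeeper.data.Stat;

    class ZkAclHelper {
      // Assumes "zk" is a connected handle and "zkAcl" is the desired ACL list.
      static void ensureAcl(ZooKeeper zk, String path, List<ACL> zkAcl) throws Exception {
        Stat stat = new Stat();
        List<ACL> current = zk.getACL(path, stat);    // fills "stat" as a side effect
        if (current == null || !current.containsAll(zkAcl)
            || !zkAcl.containsAll(current)) {
          zk.setACL(path, zkAcl, stat.getAversion()); // compare-and-set on the ACL version
        }
      }
    }
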

+ 10 - 13
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java

@@ -24,16 +24,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogConfigurationException;
 import org.apache.commons.logging.LogFactory;
 import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
 import org.eclipse.jetty.server.NCSARequestLog;
 import org.eclipse.jetty.server.RequestLog;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
 /**
  * RequestLog object for use with Http
  * RequestLog object for use with Http
  */
  */
 public class HttpRequestLog {
 public class HttpRequestLog {
 
 
-  public static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(HttpRequestLog.class);
   private static final HashMap<String, String> serverToComponent;
   private static final HashMap<String, String> serverToComponent;
 
 
   static {
   static {
@@ -65,20 +67,18 @@ public class HttpRequestLog {
     }
     }
     if (isLog4JLogger) {
     if (isLog4JLogger) {
       Log4JLogger httpLog4JLog = (Log4JLogger)logger;
       Log4JLogger httpLog4JLog = (Log4JLogger)logger;
-      Logger httpLogger = httpLog4JLog.getLogger();
+      org.apache.log4j.Logger httpLogger = httpLog4JLog.getLogger();
       Appender appender = null;
       Appender appender = null;
 
 
       try {
       try {
         appender = httpLogger.getAppender(appenderName);
         appender = httpLogger.getAppender(appenderName);
       } catch (LogConfigurationException e) {
       } catch (LogConfigurationException e) {
-        LOG.warn("Http request log for " + loggerName
-            + " could not be created");
+        LOG.warn("Http request log for {} could not be created", loggerName);
         throw e;
         throw e;
       }
       }
 
 
       if (appender == null) {
       if (appender == null) {
-        LOG.info("Http request log for " + loggerName
-            + " is not defined");
+        LOG.info("Http request log for {} is not defined", loggerName);
         return null;
         return null;
       }
       }
 
 
@@ -89,14 +89,11 @@ public class HttpRequestLog {
         requestLog.setFilename(requestLogAppender.getFilename());
         requestLog.setRetainDays(requestLogAppender.getRetainDays());
         return requestLog;
-      }
-      else {
-        LOG.warn("Jetty request log for " + loggerName
-            + " was of the wrong class");
+      } else {
+        LOG.warn("Jetty request log for {} was of the wrong class", loggerName);
         return null;
       }
-    }
-    else {
+    } else {
       LOG.warn("Jetty request log can only be enabled using Log4j");
       return null;
     }
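
The logging changes in this file follow the standard SLF4J idiom: keep the message as a template and pass the arguments separately, so nothing is concatenated unless the level is enabled, and a trailing Throwable still gets its stack trace. A small illustrative sketch (all names are made up):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class RequestLogExample {
      private static final Logger LOG =
          LoggerFactory.getLogger(RequestLogExample.class);

      void report(String loggerName, Throwable cause) {
        // "{}" placeholders are filled in lazily; no string building on the hot path.
        LOG.warn("Http request log for {} could not be created", loggerName);
        // A Throwable as the last argument is logged with its stack trace.
        LOG.debug("Failure detail for {}", loggerName, cause);
      }
    }
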

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java

@@ -431,6 +431,7 @@ public final class HttpServer2 implements FilterContainer {
       HttpConfiguration httpConfig = new HttpConfiguration();
       httpConfig.setRequestHeaderSize(requestHeaderSize);
       httpConfig.setResponseHeaderSize(responseHeaderSize);
+      httpConfig.setSendServerVersion(false);
 
       for (URI ep : endpoints) {
         final ServerConnector connector;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java

@@ -75,7 +75,7 @@ public class DoubleWritable implements WritableComparable<DoubleWritable> {
   
   
   @Override
   public int compareTo(DoubleWritable o) {
-    return (value < o.value ? -1 : (value == o.value ? 0 : 1));
+    return Double.compare(value, o.value);
   }
 
   @Override
@@ -94,7 +94,7 @@ public class DoubleWritable implements WritableComparable<DoubleWritable> {
                        byte[] b2, int s2, int l2) {
       double thisValue = readDouble(b1, s1);
       double thatValue = readDouble(b2, s2);
-      return (thisValue < thatValue ? -1 : (thisValue == thatValue ? 0 : 1));
+      return Double.compare(thisValue, thatValue);
     }
   }
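
Double.compare is more than a tidier one-liner: the hand-rolled ternary returns 1 for NaN in either direction (every comparison on NaN is false), so the ordering stops being antisymmetric and violates the compareTo contract. A quick demonstration:

    class DoubleCompareDemo {
      public static void main(String[] args) {
        double x = 1.0, nan = Double.NaN;
        // Hand-rolled ternary: both orders claim "greater", which breaks sorting.
        System.out.println(x < nan ? -1 : (x == nan ? 0 : 1));   // 1
        System.out.println(nan < x ? -1 : (nan == x ? 0 : 1));   // 1
        // Double.compare gives a consistent total order; NaN sorts last.
        System.out.println(Double.compare(x, nan));              // -1
        System.out.println(Double.compare(nan, x));              // 1
      }
    }
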
 
 

+ 2 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java

@@ -66,9 +66,7 @@ public class FloatWritable implements WritableComparable<FloatWritable> {
   /** Compares two FloatWritables. */
   @Override
   public int compareTo(FloatWritable o) {
-    float thisValue = this.value;
-    float thatValue = o.value;
-    return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
+    return Float.compare(value, o.value);
   }
 
   @Override
@@ -86,7 +84,7 @@ public class FloatWritable implements WritableComparable<FloatWritable> {
                        byte[] b2, int s2, int l2) {
       float thisValue = readFloat(b1, s1);
       float thatValue = readFloat(b2, s2);
-      return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
+      return Float.compare(thisValue, thatValue);
     }
   }
 
 

+ 32 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java

@@ -38,6 +38,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
 
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
@@ -245,7 +246,10 @@ public class IOUtils {
    *
    *
    * @param log the log to record problems to at debug level. Can be null.
    * @param log the log to record problems to at debug level. Can be null.
    * @param closeables the objects to close
    * @param closeables the objects to close
+   * @deprecated use {@link #cleanupWithLogger(Logger, java.io.Closeable...)}
+   * instead
    */
    */
+  @Deprecated
   public static void cleanup(Log log, java.io.Closeable... closeables) {
   public static void cleanup(Log log, java.io.Closeable... closeables) {
     for (java.io.Closeable c : closeables) {
     for (java.io.Closeable c : closeables) {
       if (c != null) {
       if (c != null) {
@@ -260,6 +264,28 @@ public class IOUtils {
     }
     }
   }
   }
 
 
+  /**
+   * Close the Closeable objects and <b>ignore</b> any {@link Throwable} or
+   * null pointers. Must only be used for cleanup in exception handlers.
+   *
+   * @param logger the log to record problems to at debug level. Can be null.
+   * @param closeables the objects to close
+   */
+  public static void cleanupWithLogger(Logger logger,
+      java.io.Closeable... closeables) {
+    for (java.io.Closeable c : closeables) {
+      if (c != null) {
+        try {
+          c.close();
+        } catch (Throwable e) {
+          if (logger != null) {
+            logger.debug("Exception in closing {}", c, e);
+          }
+        }
+      }
+    }
+  }
+
   /**
   /**
    * Closes the stream ignoring {@link Throwable}.
    * Closes the stream ignoring {@link Throwable}.
    * Must only be called in cleaning up from exception handlers.
    * Must only be called in cleaning up from exception handlers.
@@ -348,9 +374,12 @@ public class IOUtils {
     try (DirectoryStream<Path> stream =
     try (DirectoryStream<Path> stream =
              Files.newDirectoryStream(dir.toPath())) {
              Files.newDirectoryStream(dir.toPath())) {
       for (Path entry: stream) {
       for (Path entry: stream) {
-        String fileName = entry.getFileName().toString();
-        if ((filter == null) || filter.accept(dir, fileName)) {
-          list.add(fileName);
+        Path fileName = entry.getFileName();
+        if (fileName != null) {
+          String fileNameStr = fileName.toString();
+          if ((filter == null) || filter.accept(dir, fileNameStr)) {
+            list.add(fileNameStr);
+          }
         }
       }
     } catch (DirectoryIteratorException e) {
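
The cleanupWithLogger method added above mirrors the now-deprecated commons-logging cleanup but takes an SLF4J Logger. A hedged usage sketch (the class and file name are illustrative only):

    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.apache.hadoop.io.IOUtils;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class CleanupExample {
      private static final Logger LOG = LoggerFactory.getLogger(CleanupExample.class);

      static int readFirstByte(String name) {
        InputStream in = null;
        try {
          in = new FileInputStream(name);
          return in.read();
        } catch (Exception e) {
          LOG.warn("read failed for {}", name, e);
          return -1;
        } finally {
          // Null-safe; any close() failure is only logged at debug level.
          IOUtils.cleanupWithLogger(LOG, in);
        }
      }
    }
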

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java

@@ -189,8 +189,8 @@ public final class ECSchema {
     sb.append((extraOptions.isEmpty() ? "" : ", "));
 
     int i = 0;
-    for (String opt : extraOptions.keySet()) {
-      sb.append(opt + "=" + extraOptions.get(opt) +
+    for (Map.Entry<String, String> entry : extraOptions.entrySet()) {
+      sb.append(entry.getKey() + "=" + entry.getValue() +
           (++i < extraOptions.size() ? ", " : ""));
     }
 
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java

@@ -62,6 +62,6 @@ public class ErasureEncodingStep implements ErasureCodingStep {
 
 
   @Override
   public void finish() {
-    rawEncoder.release();
+    // do nothing
   }
 }

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.LoggerFactory;
 
 
@@ -66,7 +67,7 @@ abstract class AbstractNativeRawDecoder extends RawErasureDecoder {
 
 
   @Override
   @Override
   protected void doDecode(ByteArrayDecodingState decodingState) {
   protected void doDecode(ByteArrayDecodingState decodingState) {
-    LOG.warn("convertToByteBufferState is invoked, " +
+    PerformanceAdvisory.LOG.debug("convertToByteBufferState is invoked, " +
         "not efficiently. Please use direct ByteBuffer inputs/outputs");
         "not efficiently. Please use direct ByteBuffer inputs/outputs");
 
 
     ByteBufferDecodingState bbdState = decodingState.convertToByteBufferState();
     ByteBufferDecodingState bbdState = decodingState.convertToByteBufferState();
@@ -78,6 +79,11 @@ abstract class AbstractNativeRawDecoder extends RawErasureDecoder {
     }
     }
   }
   }
 
 
+  @Override
+  public boolean preferDirectBuffer() {
+    return true;
+  }
+
   // To link with the underlying data structure in the native layer.
   // To link with the underlying data structure in the native layer.
   // No get/set as only used by native codes.
   // No get/set as only used by native codes.
   private long nativeCoder;
   private long nativeCoder;

+ 7 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
 
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.LoggerFactory;
 
 
@@ -63,7 +64,7 @@ abstract class AbstractNativeRawEncoder extends RawErasureEncoder {
 
 
   @Override
   @Override
   protected void doEncode(ByteArrayEncodingState encodingState) {
   protected void doEncode(ByteArrayEncodingState encodingState) {
-    LOG.warn("convertToByteBufferState is invoked, " +
+    PerformanceAdvisory.LOG.debug("convertToByteBufferState is invoked, " +
         "not efficiently. Please use direct ByteBuffer inputs/outputs");
         "not efficiently. Please use direct ByteBuffer inputs/outputs");
 
 
     ByteBufferEncodingState bbeState = encodingState.convertToByteBufferState();
     ByteBufferEncodingState bbeState = encodingState.convertToByteBufferState();
@@ -75,6 +76,11 @@ abstract class AbstractNativeRawEncoder extends RawErasureEncoder {
     }
     }
   }
   }
 
 
+  @Override
+  public boolean preferDirectBuffer() {
+    return true;
+  }
+
   // To link with the underlying data structure in the native layer.
   // To link with the underlying data structure in the native layer.
   // No get/set as only used by native codes.
   // No get/set as only used by native codes.
   private long nativeCoder;
   private long nativeCoder;
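
Both native coders now report preferDirectBuffer() == true, and the byte-array fallback above is demoted to a performance advisory. A hedged sketch of how a caller can honour that hint when sizing its cell buffers (the helper class is hypothetical; coder construction is out of scope here):

    import java.nio.ByteBuffer;
    import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

    class BufferChoice {
      // Direct buffers avoid the on-heap copy that triggers the advisory above.
      static ByteBuffer[] newBuffers(RawErasureEncoder encoder, int count, int cellSize) {
        ByteBuffer[] buffers = new ByteBuffer[count];
        for (int i = 0; i < count; i++) {
          buffers[i] = encoder.preferDirectBuffer()
              ? ByteBuffer.allocateDirect(cellSize)
              : ByteBuffer.allocate(cellSize);
        }
        return buffers;
      }
    }
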

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java

@@ -395,7 +395,7 @@ public final class Utils {
 
 
     @Override
     public int hashCode() {
-      return (major << 16 + minor);
+      return (major << 16) + minor;
     }
   }
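
The hashCode fix above is an operator-precedence trap: '+' binds tighter than '<<', so major << 16 + minor parses as major << (16 + minor). A two-line check:

    class ShiftPrecedenceDemo {
      public static void main(String[] args) {
        int major = 1, minor = 2;
        System.out.println(major << 16 + minor);    // 262144: shifted by 18 bits
        System.out.println((major << 16) + minor);  // 65538: the intended packing
      }
    }
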
 
 

+ 9 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsRegistry.java

@@ -313,6 +313,15 @@ public class MetricsRegistry {
     return rates;
     return rates;
   }
   }
 
 
+  public synchronized MutableRollingAverages newMutableRollingAverages(
+      String name, String valueName) {
+    checkMetricName(name);
+    MutableRollingAverages rollingAverages =
+        new MutableRollingAverages(valueName);
+    metricsMap.put(name, rollingAverages);
+    return rollingAverages;
+  }
+
   synchronized void add(String name, MutableMetric metric) {
   synchronized void add(String name, MutableMetric metric) {
     checkMetricName(name);
     checkMetricName(name);
     metricsMap.put(name, metric);
     metricsMap.put(name, metric);
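
The new factory method registers a rolling-average metric under the given name (the metric class itself is renamed to MutableRollingAverages later in this commit). A hedged sketch against this API, assuming the metric keeps its usual add(name, value) sample method; all names are examples:

    import org.apache.hadoop.metrics2.lib.MetricsRegistry;
    import org.apache.hadoop.metrics2.lib.MutableRollingAverages;

    class RollingAvgSource {
      private final MetricsRegistry registry = new MetricsRegistry("ExampleSource");
      private final MutableRollingAverages sendAvg =
          registry.newMutableRollingAverages("sendPacketAvg", "Time");

      void onPacketSent(long elapsedMillis) {
        // Samples are folded into the default rolling windows (36 windows of 5 minutes).
        sendAvg.add("sendPacket", elapsedMillis);
      }
    }
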

+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java

@@ -78,6 +78,10 @@ public class MutableMetricsFactory {
                               annotation.sampleName(), annotation.valueName(),
                               annotation.always());
     }
+    if (cls == MutableRollingAverages.class) {
+      return registry.newMutableRollingAverages(info.name(),
+          annotation.valueName());
+    }
     throw new MetricsException("Unsupported metric field "+ field.getName() +
                                " of type "+ field.getType().getName());
   }

+ 35 - 41
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/RollingAverages.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java

@@ -31,6 +31,7 @@ import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 import java.util.function.Function;
 
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -57,29 +58,30 @@ import static org.apache.hadoop.metrics2.lib.Interns.*;
  */
  */
 @InterfaceAudience.Public
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 @InterfaceStability.Evolving
-public class RollingAverages extends MutableMetric implements Closeable {
+public class MutableRollingAverages extends MutableMetric implements Closeable {
 
 
-  private final MutableRatesWithAggregation innerMetrics =
+  private MutableRatesWithAggregation innerMetrics =
       new MutableRatesWithAggregation();
       new MutableRatesWithAggregation();
 
 
-  private static final ScheduledExecutorService SCHEDULER = Executors
+  @VisibleForTesting
+  static final ScheduledExecutorService SCHEDULER = Executors
       .newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true)
       .newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true)
-          .setNameFormat("RollingAverages-%d").build());
+          .setNameFormat("MutableRollingAverages-%d").build());
 
 
   private ScheduledFuture<?> scheduledTask = null;
   private ScheduledFuture<?> scheduledTask = null;
 
 
   @Nullable
   @Nullable
   private Map<String, MutableRate> currentSnapshot;
   private Map<String, MutableRate> currentSnapshot;
 
 
-  private final int numWindows;
   private final String avgInfoNameTemplate;
   private final String avgInfoNameTemplate;
   private final String avgInfoDescTemplate;
   private final String avgInfoDescTemplate;
+  private int numWindows;
 
 
   private static class SumAndCount {
   private static class SumAndCount {
     private final double sum;
     private final double sum;
     private final long count;
     private final long count;
 
 
-    public SumAndCount(final double sum, final long count) {
+    SumAndCount(final double sum, final long count) {
       this.sum = sum;
       this.sum = sum;
       this.count = count;
       this.count = count;
     }
     }
@@ -105,44 +107,36 @@ public class RollingAverages extends MutableMetric implements Closeable {
   private Map<String, LinkedBlockingDeque<SumAndCount>> averages =
   private Map<String, LinkedBlockingDeque<SumAndCount>> averages =
       new ConcurrentHashMap<>();
       new ConcurrentHashMap<>();
 
 
+  private static final long WINDOW_SIZE_MS_DEFAULT = 300_000;
+  private static final int NUM_WINDOWS_DEFAULT = 36;
+
   /**
   /**
-   * Constructor of {@link RollingAverages}.
-   * @param windowSizeMs
-   *          The number of milliseconds of each window for which subset
-   *          of samples are gathered to compute the rolling average, A.K.A.
-   *          roll over interval.
-   * @param numWindows
-   *          The number of windows maintained to compute the rolling average.
-   * @param valueName
-   *          of the metric (e.g. "Time", "Latency")
+   * Constructor for {@link MutableRollingAverages}.
+   * @param metricValueName
    */
    */
-  public RollingAverages(
-      final long windowSizeMs,
-      final int numWindows,
-      final String valueName) {
-    String uvName = StringUtils.capitalize(valueName);
-    String lvName = StringUtils.uncapitalize(valueName);
-    avgInfoNameTemplate = "[%s]" + "RollingAvg"+ uvName;
-    avgInfoDescTemplate = "Rolling average "+ lvName +" for "+ "%s";
-    this.numWindows = numWindows;
+  public MutableRollingAverages(String metricValueName) {
+    if (metricValueName == null) {
+      metricValueName = "";
+    }
+    avgInfoNameTemplate = "[%s]" + "RollingAvg" +
+        StringUtils.capitalize(metricValueName);
+    avgInfoDescTemplate = "Rolling average " +
+        StringUtils.uncapitalize(metricValueName) +" for "+ "%s";
+    numWindows = NUM_WINDOWS_DEFAULT;
     scheduledTask = SCHEDULER.scheduleAtFixedRate(new RatesRoller(this),
     scheduledTask = SCHEDULER.scheduleAtFixedRate(new RatesRoller(this),
-        windowSizeMs, windowSizeMs, TimeUnit.MILLISECONDS);
+        WINDOW_SIZE_MS_DEFAULT, WINDOW_SIZE_MS_DEFAULT, TimeUnit.MILLISECONDS);
   }
   }
 
 
   /**
   /**
-   * Constructor of {@link RollingAverages}.
-   * @param windowSizeMs
-   *          The number of seconds of each window for which sub set of samples
-   *          are gathered to compute rolling average, also A.K.A roll over
-   *          interval.
-   * @param numWindows
-   *          The number of windows maintained in the same time to compute the
-   *          average of the rolling averages.
+   * This method is for testing only to replace the scheduledTask.
    */
    */
-  public RollingAverages(
-      final long windowSizeMs,
-      final int numWindows) {
-    this(windowSizeMs, numWindows, "Time");
+  @VisibleForTesting
+  synchronized void replaceScheduledTask(int windows, long interval,
+                                         TimeUnit timeUnit) {
+    numWindows = windows;
+    scheduledTask.cancel(true);
+    scheduledTask = SCHEDULER.scheduleAtFixedRate(new RatesRoller(this),
+        interval, interval, timeUnit);
   }
   }
 
 
   @Override
   @Override
@@ -190,9 +184,9 @@ public class RollingAverages extends MutableMetric implements Closeable {
   }
   }
 
 
   private static class RatesRoller implements Runnable {
   private static class RatesRoller implements Runnable {
-    private final RollingAverages parent;
+    private final MutableRollingAverages parent;
 
 
-    public RatesRoller(final RollingAverages parent) {
+    RatesRoller(final MutableRollingAverages parent) {
       this.parent = parent;
       this.parent = parent;
     }
     }
 
 
@@ -218,7 +212,7 @@ public class RollingAverages extends MutableMetric implements Closeable {
 
 
   /**
   /**
    * Iterates over snapshot to capture all Avg metrics into rolling structure
    * Iterates over snapshot to capture all Avg metrics into rolling structure
-   * {@link RollingAverages#averages}.
+   * {@link MutableRollingAverages#averages}.
    */
    */
   private synchronized void rollOverAvgs() {
   private synchronized void rollOverAvgs() {
     if (currentSnapshot == null) {
     if (currentSnapshot == null) {
@@ -232,7 +226,7 @@ public class RollingAverages extends MutableMetric implements Closeable {
           new Function<String, LinkedBlockingDeque<SumAndCount>>() {
           new Function<String, LinkedBlockingDeque<SumAndCount>>() {
             @Override
             @Override
             public LinkedBlockingDeque<SumAndCount> apply(String k) {
             public LinkedBlockingDeque<SumAndCount> apply(String k) {
-              return new LinkedBlockingDeque<SumAndCount>(numWindows);
+              return new LinkedBlockingDeque<>(numWindows);
             }
             }
           });
           });
       final SumAndCount sumAndCount = new SumAndCount(
       final SumAndCount sumAndCount = new SumAndCount(

+ 0 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java

@@ -45,7 +45,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 @InterfaceStability.Unstable
 public class NetworkTopology {
   public final static String DEFAULT_RACK = "/default-rack";
-  public final static int DEFAULT_HOST_LEVEL = 2;
   public static final Logger LOG =
       LoggerFactory.getLogger(NetworkTopology.class);
 
 

+ 34 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java

@@ -130,6 +130,19 @@ public class LdapGroupsMapping
   public static final String BASE_DN_KEY = LDAP_CONFIG_PREFIX + ".base";
   public static final String BASE_DN_KEY = LDAP_CONFIG_PREFIX + ".base";
   public static final String BASE_DN_DEFAULT = "";
   public static final String BASE_DN_DEFAULT = "";
 
 
+  /*
+   * Base DN used in user search.
+   */
+  public static final String USER_BASE_DN_KEY =
+          LDAP_CONFIG_PREFIX + ".userbase";
+
+  /*
+   * Base DN used in group search.
+   */
+  public static final String GROUP_BASE_DN_KEY =
+          LDAP_CONFIG_PREFIX + ".groupbase";
+
+
   /*
   /*
    * Any additional filters to apply when searching for users
    * Any additional filters to apply when searching for users
    */
    */
@@ -200,7 +213,7 @@ public class LdapGroupsMapping
 
 
   private static final Log LOG = LogFactory.getLog(LdapGroupsMapping.class);
   private static final Log LOG = LogFactory.getLog(LdapGroupsMapping.class);
 
 
-  private static final SearchControls SEARCH_CONTROLS = new SearchControls();
+  static final SearchControls SEARCH_CONTROLS = new SearchControls();
   static {
   static {
     SEARCH_CONTROLS.setSearchScope(SearchControls.SUBTREE_SCOPE);
     SEARCH_CONTROLS.setSearchScope(SearchControls.SUBTREE_SCOPE);
   }
   }
@@ -214,7 +227,8 @@ public class LdapGroupsMapping
   private String keystorePass;
   private String keystorePass;
   private String bindUser;
   private String bindUser;
   private String bindPassword;
   private String bindPassword;
-  private String baseDN;
+  private String userbaseDN;
+  private String groupbaseDN;
   private String groupSearchFilter;
   private String groupSearchFilter;
   private String userSearchFilter;
   private String userSearchFilter;
   private String memberOfAttr;
   private String memberOfAttr;
@@ -315,7 +329,7 @@ public class LdapGroupsMapping
       uidNumber = uidAttribute.get().toString();
       uidNumber = uidAttribute.get().toString();
     }
     }
     if (uidNumber != null && gidNumber != null) {
     if (uidNumber != null && gidNumber != null) {
-      return c.search(baseDN,
+      return c.search(groupbaseDN,
               "(&"+ groupSearchFilter + "(|(" + posixGidAttr + "={0})" +
               "(&"+ groupSearchFilter + "(|(" + posixGidAttr + "={0})" +
                   "(" + groupMemberAttr + "={1})))",
                   "(" + groupMemberAttr + "={1})))",
               new Object[] {gidNumber, uidNumber},
               new Object[] {gidNumber, uidNumber},
@@ -350,7 +364,7 @@ public class LdapGroupsMapping
     } else {
     } else {
       String userDn = result.getNameInNamespace();
       String userDn = result.getNameInNamespace();
       groupResults =
       groupResults =
-          c.search(baseDN,
+          c.search(groupbaseDN,
               "(&" + groupSearchFilter + "(" + groupMemberAttr + "={0}))",
               "(&" + groupSearchFilter + "(" + groupMemberAttr + "={0}))",
               new Object[]{userDn},
               new Object[]{userDn},
               SEARCH_CONTROLS);
               SEARCH_CONTROLS);
@@ -391,7 +405,7 @@ public class LdapGroupsMapping
     DirContext c = getDirContext();
     DirContext c = getDirContext();
 
 
     // Search for the user. We'll only ever need to look at the first result
     // Search for the user. We'll only ever need to look at the first result
-    NamingEnumeration<SearchResult> results = c.search(baseDN,
+    NamingEnumeration<SearchResult> results = c.search(userbaseDN,
         userSearchFilter, new Object[]{user}, SEARCH_CONTROLS);
         userSearchFilter, new Object[]{user}, SEARCH_CONTROLS);
     // return empty list if the user can not be found.
     // return empty list if the user can not be found.
     if (!results.hasMoreElements()) {
     if (!results.hasMoreElements()) {
@@ -489,7 +503,7 @@ public class LdapGroupsMapping
     filter.append("))");
     filter.append("))");
     LOG.debug("Ldap group query string: " + filter.toString());
     LOG.debug("Ldap group query string: " + filter.toString());
     NamingEnumeration<SearchResult> groupResults =
     NamingEnumeration<SearchResult> groupResults =
-        context.search(baseDN,
+        context.search(groupbaseDN,
            filter.toString(),
            filter.toString(),
            SEARCH_CONTROLS);
            SEARCH_CONTROLS);
     while (groupResults.hasMoreElements()) {
     while (groupResults.hasMoreElements()) {
@@ -575,7 +589,20 @@ public class LdapGroupsMapping
           conf.get(BIND_PASSWORD_FILE_KEY, BIND_PASSWORD_FILE_DEFAULT));
           conf.get(BIND_PASSWORD_FILE_KEY, BIND_PASSWORD_FILE_DEFAULT));
     }
     }
     
     
-    baseDN = conf.get(BASE_DN_KEY, BASE_DN_DEFAULT);
+    String baseDN = conf.getTrimmed(BASE_DN_KEY, BASE_DN_DEFAULT);
+
+    //User search base which defaults to base dn.
+    userbaseDN = conf.getTrimmed(USER_BASE_DN_KEY, baseDN);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Usersearch baseDN: " + userbaseDN);
+    }
+
+    //Group search base which defaults to base dn.
+    groupbaseDN = conf.getTrimmed(GROUP_BASE_DN_KEY, baseDN);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Groupsearch baseDN: " + userbaseDN);
+    }
+
     groupSearchFilter =
     groupSearchFilter =
         conf.get(GROUP_SEARCH_FILTER_KEY, GROUP_SEARCH_FILTER_DEFAULT);
         conf.get(GROUP_SEARCH_FILTER_KEY, GROUP_SEARCH_FILTER_DEFAULT);
     userSearchFilter =
     userSearchFilter =
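
With the two new keys, the user search and the group search can use different base DNs, each falling back to the existing BASE_DN_KEY value (trimmed) when unset. A hedged configuration sketch using the new constants; the DN values are placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.LdapGroupsMapping;

    class LdapBaseDnConfig {
      static Configuration separateSearchBases() {
        Configuration conf = new Configuration();
        // Placeholder DNs; only the keys come from this patch.
        conf.set(LdapGroupsMapping.USER_BASE_DN_KEY, "ou=people,dc=example,dc=com");
        conf.set(LdapGroupsMapping.GROUP_BASE_DN_KEY, "ou=groups,dc=example,dc=com");
        return conf;
      }
    }
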

+ 1 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java

@@ -629,6 +629,7 @@ extends AbstractDelegationTokenIdentifier>
     // don't hold lock on 'this' to avoid edit log updates blocking token ops
     for (TokenIdent ident : expiredTokens) {
       logExpireToken(ident);
+      LOG.info("Removing expired token " + formatTokenId(ident));
       removeStoredToken(ident);
     }
   }

+ 2 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java

@@ -881,11 +881,9 @@ public abstract class ZKDelegationTokenSecretManager<TokenIdent extends Abstract
     String nodeCreatePath =
     String nodeCreatePath =
         getNodePath(ZK_DTSM_TOKENS_ROOT, DELEGATION_TOKEN_PREFIX
         getNodePath(ZK_DTSM_TOKENS_ROOT, DELEGATION_TOKEN_PREFIX
             + ident.getSequenceNumber());
             + ident.getSequenceNumber());
-    ByteArrayOutputStream tokenOs = new ByteArrayOutputStream();
-    DataOutputStream tokenOut = new DataOutputStream(tokenOs);
-    ByteArrayOutputStream seqOs = new ByteArrayOutputStream();
 
 
-    try {
+    try (ByteArrayOutputStream tokenOs = new ByteArrayOutputStream();
+         DataOutputStream tokenOut = new DataOutputStream(tokenOs)) {
       ident.write(tokenOut);
       ident.write(tokenOut);
       tokenOut.writeLong(info.getRenewDate());
       tokenOut.writeLong(info.getRenewDate());
       tokenOut.writeInt(info.getPassword().length);
       tokenOut.writeInt(info.getPassword().length);
@@ -902,8 +900,6 @@ public abstract class ZKDelegationTokenSecretManager<TokenIdent extends Abstract
         zkClient.create().withMode(CreateMode.PERSISTENT)
         zkClient.create().withMode(CreateMode.PERSISTENT)
             .forPath(nodeCreatePath, tokenOs.toByteArray());
             .forPath(nodeCreatePath, tokenOs.toByteArray());
       }
       }
-    } finally {
-      seqOs.close();
     }
     }
   }
   }
 
 

+ 11 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.java

@@ -29,6 +29,8 @@ import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 import java.io.IOException;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.HttpURLConnection;
@@ -61,6 +63,9 @@ import java.util.Map;
 @InterfaceStability.Unstable
 @InterfaceStability.Unstable
 public class DelegationTokenAuthenticatedURL extends AuthenticatedURL {
 public class DelegationTokenAuthenticatedURL extends AuthenticatedURL {
 
 
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DelegationTokenAuthenticatedURL.class);
+
   /**
   /**
    * Constant used in URL's query string to perform a proxy user request, the
    * Constant used in URL's query string to perform a proxy user request, the
    * value of the <code>DO_AS</code> parameter is the user the request will be
    * value of the <code>DO_AS</code> parameter is the user the request will be
@@ -283,17 +288,23 @@ public class DelegationTokenAuthenticatedURL extends AuthenticatedURL {
     Map<String, String> extraParams = new HashMap<String, String>();
     Map<String, String> extraParams = new HashMap<String, String>();
     org.apache.hadoop.security.token.Token<? extends TokenIdentifier> dToken
     org.apache.hadoop.security.token.Token<? extends TokenIdentifier> dToken
         = null;
         = null;
+    LOG.debug("Connecting to url {} with token {} as {}", url, token, doAs);
     // if we have valid auth token, it takes precedence over a delegation token
     // if we have valid auth token, it takes precedence over a delegation token
     // and we don't even look for one.
     // and we don't even look for one.
     if (!token.isSet()) {
     if (!token.isSet()) {
       // delegation token
       // delegation token
       Credentials creds = UserGroupInformation.getCurrentUser().
       Credentials creds = UserGroupInformation.getCurrentUser().
           getCredentials();
           getCredentials();
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Token not set, looking for delegation token. Creds:{}",
+            creds.getAllTokens());
+      }
       if (!creds.getAllTokens().isEmpty()) {
       if (!creds.getAllTokens().isEmpty()) {
         InetSocketAddress serviceAddr = new InetSocketAddress(url.getHost(),
         InetSocketAddress serviceAddr = new InetSocketAddress(url.getHost(),
             url.getPort());
             url.getPort());
         Text service = SecurityUtil.buildTokenService(serviceAddr);
         Text service = SecurityUtil.buildTokenService(serviceAddr);
         dToken = creds.getToken(service);
         dToken = creds.getToken(service);
+        LOG.debug("Using delegation token {} from service:{}", dToken, service);
         if (dToken != null) {
         if (dToken != null) {
           if (useQueryStringForDelegationToken()) {
           if (useQueryStringForDelegationToken()) {
             // delegation token will go in the query string, injecting it
             // delegation token will go in the query string, injecting it

+ 8 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -48,6 +48,8 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonGenerator;
@@ -79,6 +81,8 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceStability.Evolving
 @InterfaceStability.Evolving
 public abstract class DelegationTokenAuthenticationHandler
 public abstract class DelegationTokenAuthenticationHandler
     implements AuthenticationHandler {
     implements AuthenticationHandler {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DelegationTokenAuthenticationHandler.class);
 
 
   protected static final String TYPE_POSTFIX = "-dt";
   protected static final String TYPE_POSTFIX = "-dt";
 
 
@@ -220,6 +224,7 @@ public abstract class DelegationTokenAuthenticationHandler
       HttpServletRequest request, HttpServletResponse response)
       HttpServletRequest request, HttpServletResponse response)
       throws IOException, AuthenticationException {
       throws IOException, AuthenticationException {
     boolean requestContinues = true;
     boolean requestContinues = true;
+    LOG.trace("Processing operation for req=({}), token: {}", request, token);
     String op = ServletUtils.getParameter(request,
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
         KerberosDelegationTokenAuthenticator.OP_PARAM);
     op = (op != null) ? StringUtils.toUpperCase(op) : null;
     op = (op != null) ? StringUtils.toUpperCase(op) : null;
@@ -232,6 +237,7 @@ public abstract class DelegationTokenAuthenticationHandler
         if (dtOp.requiresKerberosCredentials() && token == null) {
         if (dtOp.requiresKerberosCredentials() && token == null) {
           // Don't authenticate via DT for DT ops.
           // Don't authenticate via DT for DT ops.
           token = authHandler.authenticate(request, response);
           token = authHandler.authenticate(request, response);
+          LOG.trace("Got token: {}.", token);
           if (token == null) {
           if (token == null) {
             requestContinues = false;
             requestContinues = false;
             doManagement = false;
             doManagement = false;
@@ -380,6 +386,7 @@ public abstract class DelegationTokenAuthenticationHandler
     AuthenticationToken token;
     AuthenticationToken token;
     String delegationParam = getDelegationToken(request);
     String delegationParam = getDelegationToken(request);
     if (delegationParam != null) {
     if (delegationParam != null) {
+      LOG.debug("Authenticating with dt param: {}", delegationParam);
       try {
       try {
         Token<AbstractDelegationTokenIdentifier> dt = new Token();
         Token<AbstractDelegationTokenIdentifier> dt = new Token();
         dt.decodeFromUrlString(delegationParam);
         dt.decodeFromUrlString(delegationParam);
@@ -397,6 +404,7 @@ public abstract class DelegationTokenAuthenticationHandler
             HttpServletResponse.SC_FORBIDDEN, new AuthenticationException(ex));
             HttpServletResponse.SC_FORBIDDEN, new AuthenticationException(ex));
       }
       }
     } else {
     } else {
+      LOG.debug("Falling back to {} (req={})", authHandler.getClass(), request);
       token = authHandler.authenticate(request, response);
       token = authHandler.authenticate(request, response);
     }
     }
     return token;
     return token;

+ 11 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java

@@ -115,10 +115,16 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     if (token instanceof DelegationTokenAuthenticatedURL.Token) {
     if (token instanceof DelegationTokenAuthenticatedURL.Token) {
       hasDt = ((DelegationTokenAuthenticatedURL.Token) token).
       hasDt = ((DelegationTokenAuthenticatedURL.Token) token).
           getDelegationToken() != null;
           getDelegationToken() != null;
+      if (hasDt) {
+        LOG.trace("Delegation token found: {}",
+            ((DelegationTokenAuthenticatedURL.Token) token)
+                .getDelegationToken());
+      }
     }
     }
     if (!hasDt) {
     if (!hasDt) {
       String queryStr = url.getQuery();
       String queryStr = url.getQuery();
       hasDt = (queryStr != null) && queryStr.contains(DELEGATION_PARAM + "=");
       hasDt = (queryStr != null) && queryStr.contains(DELEGATION_PARAM + "=");
+      LOG.trace("hasDt={}, queryStr={}", hasDt, queryStr);
     }
     }
     return hasDt;
     return hasDt;
   }
   }
@@ -129,7 +135,12 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     if (!hasDelegationToken(url, token)) {
     if (!hasDelegationToken(url, token)) {
       // check and renew TGT to handle potential expiration
       // check and renew TGT to handle potential expiration
       UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
       UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
+      LOG.debug("No delegation token found for url={}, token={}, authenticating"
+          + " with {}", url, token, authenticator.getClass());
       authenticator.authenticate(url, token);
       authenticator.authenticate(url, token);
+    } else {
+      LOG.debug("Authenticated from delegation token. url={}, token={}",
+          url, token);
     }
     }
   }
   }
 
 

+ 20 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java

@@ -25,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.slf4j.Logger;
 
 
 /**
 /**
  * This class contains a set of methods to work with services, especially
  * This class contains a set of methods to work with services, especially
@@ -87,6 +88,25 @@ public final class ServiceOperations {
     return null;
     return null;
   }
   }
 
 
+  /**
+   * Stop a service; if it is null do nothing. Exceptions are caught and
+   * logged at warn level. (but not Throwables). This operation is intended to
+   * be used in cleanup operations
+   *
+   * @param log the log to warn at
+   * @param service a service; may be null
+   * @return any exception that was caught; null if none was.
+   * @see ServiceOperations#stopQuietly(Service)
+   */
+  public static Exception stopQuietly(Logger log, Service service) {
+    try {
+      stop(service);
+    } catch (Exception e) {
+      log.warn("When stopping the service {} : {}", service.getName(), e, e);
+      return e;
+    }
+    return null;
+  }
 
 
   /**
   /**
    * Class to manage a list of {@link ServiceStateChangeListener} instances,
    * Class to manage a list of {@link ServiceStateChangeListener} instances,
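
The new overload lets shutdown code that already uses SLF4J stop a service without propagating failures. A short hedged sketch (the helper class is hypothetical):

    import org.apache.hadoop.service.Service;
    import org.apache.hadoop.service.ServiceOperations;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ShutdownHelper {
      private static final Logger LOG = LoggerFactory.getLogger(ShutdownHelper.class);

      static void tearDown(Service service) {
        // Null-safe; a stop() failure is logged at warn level and returned, not thrown.
        Exception failure = ServiceOperations.stopQuietly(LOG, service);
        if (failure != null) {
          LOG.warn("Ignoring failure stopping {}", service.getName(), failure);
        }
      }
    }
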

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java

@@ -166,7 +166,7 @@ public class TraceAdmin extends Configured implements Tool {
       System.err.println("You must specify a host with -host.");
       System.err.println("You must specify a host with -host.");
       return 1;
       return 1;
     }
     }
-    if (args.size() < 0) {
+    if (args.isEmpty()) {
       System.err.println("You must specify an operation.");
       System.err.println("You must specify an operation.");
       return 1;
       return 1;
     }
     }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java

@@ -42,8 +42,8 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
-// Keeps track of which datanodes/tasktrackers are allowed to connect to the 
-// namenode/jobtracker.
+// Keeps track of which datanodes/nodemanagers are allowed to connect to the
+// namenode/resourcemanager.
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class HostsFileReader {

+ 6 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedLock.java

@@ -25,9 +25,9 @@ import java.util.concurrent.locks.ReentrantLock;
 
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.commons.logging.Log;
 
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
 
 
 /**
 /**
  * This is a debugging class that can be used by callers to track
  * This is a debugging class that can be used by callers to track
@@ -44,7 +44,7 @@ import com.google.common.annotations.VisibleForTesting;
 public class InstrumentedLock implements Lock {
 public class InstrumentedLock implements Lock {
 
 
   private final Lock lock;
   private final Lock lock;
-  private final Log logger;
+  private final Logger logger;
   private final String name;
   private final String name;
   private final Timer clock;
   private final Timer clock;
 
 
@@ -70,20 +70,20 @@ public class InstrumentedLock implements Lock {
    * @param lockWarningThresholdMs the time threshold to view lock held
    * @param lockWarningThresholdMs the time threshold to view lock held
    *                               time as being "too long"
    *                               time as being "too long"
    */
    */
-  public InstrumentedLock(String name, Log logger, long minLoggingGapMs,
-      long lockWarningThresholdMs) {
+  public InstrumentedLock(String name, Logger logger, long minLoggingGapMs,
+                          long lockWarningThresholdMs) {
     this(name, logger, new ReentrantLock(),
     this(name, logger, new ReentrantLock(),
         minLoggingGapMs, lockWarningThresholdMs);
         minLoggingGapMs, lockWarningThresholdMs);
   }
   }
 
 
-  public InstrumentedLock(String name, Log logger, Lock lock,
+  public InstrumentedLock(String name, Logger logger, Lock lock,
       long minLoggingGapMs, long lockWarningThresholdMs) {
       long minLoggingGapMs, long lockWarningThresholdMs) {
     this(name, logger, lock,
     this(name, logger, lock,
         minLoggingGapMs, lockWarningThresholdMs, new Timer());
         minLoggingGapMs, lockWarningThresholdMs, new Timer());
   }
   }
 
 
   @VisibleForTesting
   @VisibleForTesting
-  InstrumentedLock(String name, Log logger, Lock lock,
+  InstrumentedLock(String name, Logger logger, Lock lock,
       long minLoggingGapMs, long lockWarningThresholdMs, Timer clock) {
       long minLoggingGapMs, long lockWarningThresholdMs, Timer clock) {
     this.name = name;
     this.name = name;
     this.lock = lock;
     this.lock = lock;
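
After this change the instrumented locks are built with an SLF4J Logger. A hedged construction sketch; the thresholds are arbitrary example values:

    import org.apache.hadoop.util.InstrumentedLock;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LockUser {
      private static final Logger LOG = LoggerFactory.getLogger(LockUser.class);

      // Warn when the lock is held longer than 300 ms, logging at most once per second.
      private final InstrumentedLock lock =
          new InstrumentedLock("exampleLock", LOG, 1000L, 300L);

      void doWork() {
        lock.lock();
        try {
          // critical section
        } finally {
          lock.unlock();
        }
      }
    }
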

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedReadLock.java

@@ -19,11 +19,11 @@ package org.apache.hadoop.util;
 
 
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceStability;
 
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
 
 
 /**
 /**
  * This is a wrap class of a <tt>ReadLock</tt>.
  * This is a wrap class of a <tt>ReadLock</tt>.
@@ -51,7 +51,7 @@ public class InstrumentedReadLock extends InstrumentedLock {
     };
     };
   };
   };
 
 
-  public InstrumentedReadLock(String name, Log logger,
+  public InstrumentedReadLock(String name, Logger logger,
       ReentrantReadWriteLock readWriteLock,
       ReentrantReadWriteLock readWriteLock,
       long minLoggingGapMs, long lockWarningThresholdMs) {
       long minLoggingGapMs, long lockWarningThresholdMs) {
     this(name, logger, readWriteLock, minLoggingGapMs, lockWarningThresholdMs,
     this(name, logger, readWriteLock, minLoggingGapMs, lockWarningThresholdMs,
@@ -59,7 +59,7 @@ public class InstrumentedReadLock extends InstrumentedLock {
   }
   }
 
 
   @VisibleForTesting
   @VisibleForTesting
-  InstrumentedReadLock(String name, Log logger,
+  InstrumentedReadLock(String name, Logger logger,
       ReentrantReadWriteLock readWriteLock,
       ReentrantReadWriteLock readWriteLock,
       long minLoggingGapMs, long lockWarningThresholdMs, Timer clock) {
       long minLoggingGapMs, long lockWarningThresholdMs, Timer clock) {
     super(name, logger, readWriteLock.readLock(), minLoggingGapMs,
     super(name, logger, readWriteLock.readLock(), minLoggingGapMs,

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedReadWriteLock.java

@@ -21,9 +21,9 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.slf4j.Logger;
 
 /**
  * This is a wrap class of a {@link ReentrantReadWriteLock}.
@@ -37,7 +37,7 @@ public class InstrumentedReadWriteLock implements ReadWriteLock {
   private final Lock readLock;
   private final Lock writeLock;
 
-  InstrumentedReadWriteLock(boolean fair, String name, Log logger,
+  InstrumentedReadWriteLock(boolean fair, String name, Logger logger,
      long minLoggingGapMs, long lockWarningThresholdMs) {
    ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock(fair);
    readLock = new InstrumentedReadLock(name, logger, readWriteLock,

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/InstrumentedWriteLock.java

@@ -19,11 +19,11 @@ package org.apache.hadoop.util;
 
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
 
 /**
  * This is a wrap class of a <tt>WriteLock</tt>.
@@ -37,7 +37,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceStability.Unstable
 public class InstrumentedWriteLock extends InstrumentedLock {
 
-  public InstrumentedWriteLock(String name, Log logger,
+  public InstrumentedWriteLock(String name, Logger logger,
      ReentrantReadWriteLock readWriteLock,
      long minLoggingGapMs, long lockWarningThresholdMs) {
    this(name, logger, readWriteLock, minLoggingGapMs, lockWarningThresholdMs,
@@ -45,7 +45,7 @@ public class InstrumentedWriteLock extends InstrumentedLock {
  }

  @VisibleForTesting
-  InstrumentedWriteLock(String name, Log logger,
+  InstrumentedWriteLock(String name, Logger logger,
      ReentrantReadWriteLock readWriteLock,
      long minLoggingGapMs, long lockWarningThresholdMs, Timer clock) {
    super(name, logger, readWriteLock.writeLock(), minLoggingGapMs,

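The instrumented lock wrappers keep their constructor shape; only the logger type changes from commons-logging `Log` to slf4j `Logger`. A minimal, hypothetical usage sketch follows (the class name, log messages, and threshold values are illustrative; only the public `InstrumentedLock` constructor signature comes from the diff above):

```java
import java.util.concurrent.locks.ReentrantLock;

import org.apache.hadoop.util.InstrumentedLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class InstrumentedLockExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(InstrumentedLockExample.class);

  public static void main(String[] args) {
    // Arguments: name, logger, lock, minLoggingGapMs, lockWarningThresholdMs.
    InstrumentedLock lock = new InstrumentedLock("exampleLock", LOG,
        new ReentrantLock(), 300, 4000);
    lock.lock();
    try {
      // critical section; holding the lock longer than
      // lockWarningThresholdMs triggers a rate-limited warning via LOG
    } finally {
      lock.unlock();
    }
  }
}
```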
+ 4 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LogAdapter.java

@@ -32,6 +32,10 @@ class LogAdapter {
     this.LOGGER = LOGGER;
   }
 
+  /**
+   * @deprecated use {@link #create(Logger)} instead
+   */
+  @Deprecated
   public static LogAdapter create(Log LOG) {
     return new LogAdapter(LOG);
   }

+ 30 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java

@@ -46,6 +46,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.Serializer;
+import org.slf4j.Logger;
 
 /**
  * General reflection utils
@@ -228,6 +229,35 @@ public class ReflectionUtils {
    }
  }
 
+  /**
+   * Log the current thread stacks at INFO level.
+   * @param log the logger that logs the stack trace
+   * @param title a descriptive title for the call stacks
+   * @param minInterval the minimum time, in seconds, since the last stack dump
+   */
+  public static void logThreadInfo(Logger log,
+                                   String title,
+                                   long minInterval) {
+    boolean dumpStack = false;
+    if (log.isInfoEnabled()) {
+      synchronized (ReflectionUtils.class) {
+        long now = Time.now();
+        if (now - previousLogTime >= minInterval * 1000) {
+          previousLogTime = now;
+          dumpStack = true;
+        }
+      }
+      if (dumpStack) {
+        try {
+          ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+          printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
+          log.info(buffer.toString(Charset.defaultCharset().name()));
+        } catch (UnsupportedEncodingException ignored) {
+        }
+      }
+    }
+  }
+
   /**
    * Return the correctly-typed {@link Class} of the given object.
    *  

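The new `logThreadInfo(Logger, String, long)` overload mirrors the existing commons-logging variant: `minInterval` is in seconds (the method multiplies it by 1000 before comparing against the elapsed time), and dumps are rate-limited JVM-wide through a shared timestamp. A hedged usage sketch (the surrounding class and message text are illustrative only):

```java
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThreadDumpExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(ThreadDumpExample.class);

  void reportSlowOperation() {
    // Dump all thread stacks at INFO level, at most once per 60 seconds.
    ReflectionUtils.logThreadInfo(LOG, "slow operation detected", 60);
  }
}
```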
+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java

@@ -169,7 +169,7 @@ public class SysInfoWindows extends SysInfo {
 
   /** {@inheritDoc} */
   @Override
-  public int getNumProcessors() {
+  public synchronized int getNumProcessors() {
     refreshIfNeeded();
     return numProcessors;
   }
@@ -196,7 +196,7 @@
 
   /** {@inheritDoc} */
   @Override
-  public float getCpuUsagePercentage() {
+  public synchronized float getCpuUsagePercentage() {
     refreshIfNeeded();
     float ret = cpuUsage;
     if (ret != -1) {
@@ -207,7 +207,7 @@
 
   /** {@inheritDoc} */
   @Override
-  public float getNumVCoresUsed() {
+  public synchronized float getNumVCoresUsed() {
     refreshIfNeeded();
     float ret = cpuUsage;
     if (ret != -1) {

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c

@@ -31,7 +31,7 @@
 typedef struct _XOREncoder {
   IsalCoder isalCoder;
   unsigned char* inputs[MMAX];
-  unsigned char* outputs[1];
+  unsigned char* outputs[KMAX];
 } XORDecoder;
 
 JNIEXPORT void JNICALL
@@ -58,8 +58,7 @@ Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeXORRawDecoder_decodeImpl(
   numParityUnits = ((IsalCoder*)xorDecoder)->numParityUnits;
   chunkSize = (int)dataLen;
 
-  getInputs(env, inputs, inputOffsets, xorDecoder->inputs,
-                                               numDataUnits + numParityUnits);
+  getInputs(env, inputs, inputOffsets, xorDecoder->inputs, numDataUnits);
   getOutputs(env, outputs, outputOffsets, xorDecoder->outputs, numParityUnits);
 
   for (i = 0; i < numDataUnits + numParityUnits; i++) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c

@@ -31,7 +31,7 @@
 typedef struct _XOREncoder {
   IsalCoder isalCoder;
   unsigned char* inputs[MMAX];
-  unsigned char* outputs[1];
+  unsigned char* outputs[KMAX];
 } XOREncoder;
 
 JNIEXPORT void JNICALL

+ 20 - 0
hadoop-common-project/hadoop-common/src/main/resources/core-default.xml

@@ -346,6 +346,26 @@
   </description>
 </property>
 
+<property>
+  <name>hadoop.security.group.mapping.ldap.userbase</name>
+  <value></value>
+  <description>
+    The search base for the LDAP connection for the user search query. This is
+    a distinguished name, and it is the root of the LDAP directory for users.
+    If not set, hadoop.security.group.mapping.ldap.base is used.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.ldap.groupbase</name>
+  <value></value>
+  <description>
+    The search base for the LDAP connection for the group search query. This
+    is a distinguished name, and it is the root of the LDAP directory for groups.
+    If not set, hadoop.security.group.mapping.ldap.base is used.
+  </description>
+</property>
+
 <property>
   <name>hadoop.security.group.mapping.ldap.search.filter.user</name>
   <value>(&amp;(objectClass=user)(sAMAccountName={0}))</value>

+ 11 - 2
hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md

@@ -141,7 +141,7 @@ Similar to get command, except that the destination is restricted to a local fil
 count
 -----
 
-Usage: `hadoop fs -count [-q] [-h] [-v] [-x] [-t [<storage type>]] [-u] <paths> `
+Usage: `hadoop fs -count [-q] [-h] [-v] [-x] [-t [<storage type>]] [-u] [-e] <paths> `
 
 Count the number of directories, files and bytes under the paths that match the specified file pattern. Get the quota and the usage. The output columns with -count are: DIR\_COUNT, FILE\_COUNT, CONTENT\_SIZE, PATHNAME
 
@@ -159,6 +159,12 @@ The -v option displays a header line.
 
 The -x option excludes snapshots from the result calculation. Without the -x option (default), the result is always calculated from all INodes, including all snapshots under the given path. The -x option is ignored if -u or -q option is given.
 
+The -e option shows the erasure coding policy for each file.
+
+The output columns with -count -e are: DIR\_COUNT, FILE\_COUNT, CONTENT_SIZE, ERASURECODING\_POLICY, PATHNAME
+
+The ERASURECODING\_POLICY is the name of the policy for the file. If an erasure coding policy is set on the file, it will return the name of the policy. If no erasure coding policy is set, it will return \"Replicated\", which means the file uses the replication storage strategy.
+
 Example:
 
 * `hadoop fs -count hdfs://nn1.example.com/file1 hdfs://nn2.example.com/file2`
@@ -168,6 +174,7 @@ Example:
 * `hadoop fs -count -u hdfs://nn1.example.com/file1`
 * `hadoop fs -count -u -h hdfs://nn1.example.com/file1`
 * `hadoop fs -count -u -h -v hdfs://nn1.example.com/file1`
+* `hadoop fs -count -e hdfs://nn1.example.com/file1`
 
 Exit Code:
 
@@ -403,7 +410,7 @@ Return usage output.
 ls
 ----
 
-Usage: `hadoop fs -ls [-C] [-d] [-h] [-q] [-R] [-t] [-S] [-r] [-u] <args> `
+Usage: `hadoop fs -ls [-C] [-d] [-h] [-q] [-R] [-t] [-S] [-r] [-u] [-e] <args> `
 
 Options:
 
@@ -416,6 +423,7 @@ Options:
 * -S: Sort output by file size.
 * -r: Reverse the sort order.
 * -u: Use access time rather than modification time for display and sorting.
+* -e: Display the erasure coding policy of files and directories only.
 
 For a file ls returns stat on the file with the following format:
 
@@ -430,6 +438,7 @@ Files within a directory are order by filename by default.
 Example:
 
 * `hadoop fs -ls /user/hadoop/file1`
+* `hadoop fs -ls -e /ecdir`
 
 Exit Code:
 

+ 1 - 0
hadoop-common-project/hadoop-common/src/site/markdown/GroupsMapping.md

@@ -76,6 +76,7 @@ This provider supports LDAP with simple password authentication using JNDI API.
 `hadoop.security.group.mapping.ldap.url` must be set. This refers to the URL of the LDAP server for resolving user groups.
 
 `hadoop.security.group.mapping.ldap.base` configures the search base for the LDAP connection. This is a distinguished name, and will typically be the root of the LDAP directory.
+Getting the groups for a given username first looks up the user and then looks up the groups for that user. If the directory setup has different user and group search bases, use the `hadoop.security.group.mapping.ldap.userbase` and `hadoop.security.group.mapping.ldap.groupbase` configs.
 
 If the LDAP server does not support anonymous binds,
 set the distinguished name of the user to bind in `hadoop.security.group.mapping.ldap.bind.user`.

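As a hedged illustration of the two new keys (only the key names come from the change above; the DN values and the helper class are made up for this sketch):

```java
import org.apache.hadoop.conf.Configuration;

public class LdapSearchBaseExample {
  static Configuration separateUserAndGroupBases() {
    Configuration conf = new Configuration();
    // Fallback base, used when the user/group specific bases are not set.
    conf.set("hadoop.security.group.mapping.ldap.base", "dc=example,dc=com");
    // New keys: separate search bases for the user and group queries.
    conf.set("hadoop.security.group.mapping.ldap.userbase",
        "ou=Users,dc=example,dc=com");
    conf.set("hadoop.security.group.mapping.ldap.groupbase",
        "ou=Groups,dc=example,dc=com");
    return conf;
  }
}
```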
+ 4 - 0
hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md

@@ -1185,6 +1185,10 @@ on (possibly remote) filesystems. These filesystems are invariably accessed
 concurrently; the state of the filesystem MAY change between a `hasNext()`
 probe and the invocation of the `next()` call.
 
+During iteration through a `RemoteIterator`, if the directory is deleted on
+the remote filesystem, then a `hasNext()` or `next()` call may throw
+`FileNotFoundException`.
+
 Accordingly, a robust iteration through a `RemoteIterator` would catch and
 discard `NoSuchElementException` exceptions raised during the process, which
 could be done through the `while(true)` iteration example above, or

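As a rough sketch of the defensive pattern the specification describes (this is not the spec's own example; it assumes a `FileSystem` and directory `Path` are in scope and uses `listFiles` purely for illustration):

```java
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.NoSuchElementException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class RobustListing {
  static void list(FileSystem fs, Path dir) throws IOException {
    RemoteIterator<LocatedFileStatus> it = fs.listFiles(dir, true);
    try {
      while (true) {
        // next() may also throw FileNotFoundException if the directory
        // is deleted underneath the iterator, as noted above.
        System.out.println(it.next().getPath());
      }
    } catch (NoSuchElementException end) {
      // normal termination of the while(true) pattern
    } catch (FileNotFoundException deleted) {
      // the directory disappeared during iteration; treat as end of listing
    }
  }
}
```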
+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java

@@ -1,4 +1,4 @@
-/**
+ /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

+ 82 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java

@@ -30,6 +30,7 @@ import java.io.StringWriter;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -99,6 +100,18 @@ public class TestConfiguration extends TestCase {
     out.write("<configuration>\n");
   }
 
+  private void writeHeader() throws IOException{
+    out.write("<?xml version=\"1.0\"?>\n");
+  }
+
+  private void writeHeader(String encoding) throws IOException{
+    out.write("<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>\n");
+  }
+
+  private void writeConfiguration() throws IOException{
+    out.write("<configuration>\n");
+  }
+
   private void endConfig() throws IOException{
     out.write("</configuration>\n");
     out.close();
@@ -120,6 +133,18 @@ public class TestConfiguration extends TestCase {
     out.write("</xi:fallback>\n ");
   }
 
+  private void declareEntity(String root, String entity, String value)
+      throws IOException {
+    out.write("<!DOCTYPE " + root
+        + " [\n<!ENTITY " + entity + " \"" + value + "\">\n]>");
+  }
+
+  private void declareSystemEntity(String root, String entity, String value)
+      throws IOException {
+    out.write("<!DOCTYPE " + root
+        + " [\n<!ENTITY " + entity + " SYSTEM \"" + value + "\">\n]>");
+  }
+
   public void testInputStreamResource() throws Exception {
     StringWriter writer = new StringWriter();
     out = new BufferedWriter(writer);
@@ -550,6 +575,63 @@ public class TestConfiguration extends TestCase {
     tearDown();
   }
 
+  public void testCharsetInDocumentEncoding() throws Exception {
+    tearDown();
+    out=new BufferedWriter(new OutputStreamWriter(new FileOutputStream(CONFIG),
+        StandardCharsets.ISO_8859_1));
+    writeHeader(StandardCharsets.ISO_8859_1.displayName());
+    writeConfiguration();
+    appendProperty("a", "b");
+    appendProperty("c", "Müller");
+    endConfig();
+
+    // verify that the config file contains all properties
+    Path fileResource = new Path(CONFIG);
+    conf.addResource(fileResource);
+    assertEquals(conf.get("a"), "b");
+    assertEquals(conf.get("c"), "Müller");
+    tearDown();
+  }
+
+  public void testEntityReference() throws Exception {
+    tearDown();
+    out=new BufferedWriter(new FileWriter(CONFIG));
+    writeHeader();
+    declareEntity("configuration", "d", "d");
+    writeConfiguration();
+    appendProperty("a", "b");
+    appendProperty("c", "&d;");
+    endConfig();
+
+    // verify that the config file contains all properties
+    Path fileResource = new Path(CONFIG);
+    conf.addResource(fileResource);
+    assertEquals(conf.get("a"), "b");
+    assertEquals(conf.get("c"), "d");
+    tearDown();
+  }
+
+  public void testSystemEntityReference() throws Exception {
+    tearDown();
+    out=new BufferedWriter(new FileWriter(CONFIG2));
+    out.write("d");
+    out.close();
+    out=new BufferedWriter(new FileWriter(CONFIG));
+    writeHeader();
+    declareSystemEntity("configuration", "d", CONFIG2);
+    writeConfiguration();
+    appendProperty("a", "b");
+    appendProperty("c", "&d;");
+    endConfig();
+
+    // verify that the config file contains all properties
+    Path fileResource = new Path(CONFIG);
+    conf.addResource(fileResource);
+    assertEquals(conf.get("a"), "b");
+    assertEquals(conf.get("c"), "d");
+    tearDown();
+  }
+
   public void testIncludesWithFallback() throws Exception {
     tearDown();
     out=new BufferedWriter(new FileWriter(CONFIG2));

+ 30 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoStreamsWithOpensslAesCtrCryptoCodec.java

@@ -18,12 +18,17 @@
 package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.random.OsSecureRandom;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.internal.util.reflection.Whitebox;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 
 public class TestCryptoStreamsWithOpensslAesCtrCryptoCodec 
     extends TestCryptoStreams {
@@ -32,8 +37,7 @@ public class TestCryptoStreamsWithOpensslAesCtrCryptoCodec
   public static void init() throws Exception {
     GenericTestUtils.assumeInNativeProfile();
     Configuration conf = new Configuration();
-    conf.set(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY,
+    conf.set(HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY,
         OpensslAesCtrCryptoCodec.class.getName());
     codec = CryptoCodec.getInstance(conf);
     assertNotNull("Unable to instantiate codec " +
@@ -42,4 +46,28 @@ public class TestCryptoStreamsWithOpensslAesCtrCryptoCodec
     assertEquals(OpensslAesCtrCryptoCodec.class.getCanonicalName(),
         codec.getClass().getCanonicalName());
   }
+
+  @Test
+  public void testCodecClosesRandom() throws Exception {
+    GenericTestUtils.assumeInNativeProfile();
+    Configuration conf = new Configuration();
+    conf.set(HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY,
+        OpensslAesCtrCryptoCodec.class.getName());
+    conf.set(
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY,
+        OsSecureRandom.class.getName());
+    CryptoCodec codecWithRandom = CryptoCodec.getInstance(conf);
+    assertNotNull(
+        "Unable to instantiate codec " + OpensslAesCtrCryptoCodec.class
+            .getName() + ", is the required " + "version of OpenSSL installed?",
+        codecWithRandom);
+    OsSecureRandom random =
+        (OsSecureRandom) Whitebox.getInternalState(codecWithRandom, "random");
+    // trigger the OsSecureRandom to create an internal FileInputStream
+    random.nextBytes(new byte[10]);
+    assertNotNull(Whitebox.getInternalState(random, "stream"));
+    // verify closing the codec closes the codec's random's stream.
+    codecWithRandom.close();
+    assertNull(Whitebox.getInternalState(random, "stream"));
+  }
 }

+ 5 - 5
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

@@ -659,9 +659,9 @@ public class TestLocalFileSystem {
 
     try {
       FSDataOutputStreamBuilder builder =
-          fileSys.newFSDataOutputStreamBuilder(path);
+          fileSys.createFile(path);
       FSDataOutputStream out = builder.build();
-      String content = "Create with a generic type of createBuilder!";
+      String content = "Create with a generic type of createFile!";
       byte[] contentOrigin = content.getBytes("UTF8");
       out.write(contentOrigin);
       out.close();
@@ -680,7 +680,7 @@ public class TestLocalFileSystem {
     // Test value not being set for replication, block size, buffer size
     // and permission
     FSDataOutputStreamBuilder builder =
-        fileSys.newFSDataOutputStreamBuilder(path);
+        fileSys.createFile(path);
     builder.build();
     Assert.assertEquals("Should be default block size",
         builder.getBlockSize(), fileSys.getDefaultBlockSize());
@@ -694,8 +694,8 @@ public class TestLocalFileSystem {
         builder.getPermission(), FsPermission.getFileDefault());
 
     // Test set 0 to replication, block size and buffer size
-    builder = fileSys.newFSDataOutputStreamBuilder(path);
-    builder.setBufferSize(0).setBlockSize(0).setReplication((short) 0);
+    builder = fileSys.createFile(path);
+    builder.bufferSize(0).blockSize(0).replication((short) 0);
     Assert.assertEquals("Block size should be 0",
         builder.getBlockSize(), 0);
     Assert.assertEquals("Replication factor should be 0",

+ 18 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractCreateTest.java

@@ -32,6 +32,7 @@ import java.io.IOException;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.getFileStatusEventually;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.writeTextFile;
 
@@ -272,4 +273,21 @@ public abstract class AbstractContractCreateTest extends
         defaultBlockSize >= minValue);
   }
 
+  @Test
+  public void testCreateMakesParentDirs() throws Throwable {
+    describe("check that after creating a file its parent directories exist");
+    FileSystem fs = getFileSystem();
+    Path grandparent = path("testCreateCreatesAndPopulatesParents");
+    Path parent = new Path(grandparent, "parent");
+    Path child = new Path(parent, "child");
+    touch(fs, child);
+    assertEquals("List status of parent should include the 1 child file",
+        1, fs.listStatus(parent).length);
+    assertTrue("Parent directory does not appear to be a directory",
+        fs.getFileStatus(parent).isDirectory());
+    assertEquals("List status of grandparent should include the 1 parent dir",
+        1, fs.listStatus(grandparent).length);
+    assertTrue("Grandparent directory does not appear to be a directory",
+        fs.getFileStatus(grandparent).isDirectory());
+  }
 }

+ 55 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java

@@ -17,14 +17,18 @@
  */
 package org.apache.hadoop.fs.ftp;
 
+import com.google.common.base.Preconditions;
 import org.apache.commons.net.ftp.FTP;
 
 import org.apache.commons.net.ftp.FTPClient;
+import org.apache.commons.net.ftp.FTPFile;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
 
+
 import static org.junit.Assert.assertEquals;
 
 /**
@@ -82,4 +86,55 @@ public class TestFTPFileSystem {
         client.getDataConnectionMode());
 
   }
+
+  @Test
+  public void testGetFsAction(){
+    FTPFileSystem ftp = new FTPFileSystem();
+    int[] accesses = new int[] {FTPFile.USER_ACCESS, FTPFile.GROUP_ACCESS,
+        FTPFile.WORLD_ACCESS};
+    FsAction[] actions = FsAction.values();
+    for(int i = 0; i < accesses.length; i++){
+      for(int j = 0; j < actions.length; j++){
+        enhancedAssertEquals(actions[j], ftp.getFsAction(accesses[i],
+            getFTPFileOf(accesses[i], actions[j])));
+      }
+    }
+  }
+
+  private void enhancedAssertEquals(FsAction actionA, FsAction actionB){
+    String notNullErrorMessage = "FsAction cannot be null here.";
+    Preconditions.checkNotNull(actionA, notNullErrorMessage);
+    Preconditions.checkNotNull(actionB, notNullErrorMessage);
+    String errorMessageFormat = "expect FsAction is %s, whereas it is %s now.";
+    String notEqualErrorMessage = String.format(errorMessageFormat,
+        actionA.name(), actionB.name());
+    assertEquals(notEqualErrorMessage, actionA, actionB);
+  }
+
+  private FTPFile getFTPFileOf(int access, FsAction action) {
+    boolean check = access == FTPFile.USER_ACCESS ||
+                      access == FTPFile.GROUP_ACCESS ||
+                      access == FTPFile.WORLD_ACCESS;
+    String errorFormat = "access must be in [%d,%d,%d], but it is %d now.";
+    String errorMessage = String.format(errorFormat, FTPFile.USER_ACCESS,
+         FTPFile.GROUP_ACCESS, FTPFile.WORLD_ACCESS, access);
+    Preconditions.checkArgument(check, errorMessage);
+    Preconditions.checkNotNull(action);
+    FTPFile ftpFile = new FTPFile();
+
+    if(action.implies(FsAction.READ)){
+      ftpFile.setPermission(access, FTPFile.READ_PERMISSION, true);
+    }
+
+    if(action.implies(FsAction.WRITE)){
+      ftpFile.setPermission(access, FTPFile.WRITE_PERMISSION, true);
+    }
+
+    if(action.implies(FsAction.EXECUTE)){
+      ftpFile.setPermission(access, FTPFile.EXECUTE_PERMISSION, true);
+    }
+
+    return ftpFile;
+  }
+
 }

+ 1 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java

@@ -315,6 +315,7 @@ public class TestSFTPFileSystem {
     java.nio.file.Path path = (local).pathToFile(file).toPath();
     long accessTime1 = Files.readAttributes(path, BasicFileAttributes.class)
         .lastAccessTime().toMillis();
+    accessTime1 = (accessTime1 / 1000) * 1000;
     long accessTime2 = sftpFs.getFileStatus(file).getAccessTime();
     assertEquals(accessTime1, accessTime2);
   }

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java

@@ -76,10 +76,10 @@ public class TestCopyPreserveFlag {
         output.writeChar('\n');
     }
     output.close();
-    fs.setTimes(FROM, MODIFICATION_TIME, ACCESS_TIME);
     fs.setPermission(FROM, PERMISSIONS);
-    fs.setTimes(DIR_FROM, MODIFICATION_TIME, ACCESS_TIME);
+    fs.setTimes(FROM, MODIFICATION_TIME, ACCESS_TIME);
     fs.setPermission(DIR_FROM, PERMISSIONS);
+    fs.setTimes(DIR_FROM, MODIFICATION_TIME, ACCESS_TIME);
   }
 
   @After
   @After

+ 3 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java

@@ -447,7 +447,7 @@ public class TestCount {
     Count count = new Count();
     String actual = count.getUsage();
     String expected =
-        "-count [-q] [-h] [-v] [-t [<storage type>]] [-u] [-x] <path> ...";
+        "-count [-q] [-h] [-v] [-t [<storage type>]] [-u] [-x] [-e] <path> ...";
     assertEquals("Count.getUsage", expected, actual);
   }
 
@@ -478,7 +478,8 @@ public class TestCount {
         + "It can also pass the value '', 'all' or 'ALL' to specify all the "
         + "storage types.\n"
         + "The -u option shows the quota and \n"
-        + "the usage against the quota without the detailed content summary.";
+        + "the usage against the quota without the detailed content summary."
+        + "The -e option shows the erasure coding policy.";
 
     assertEquals("Count.getDescription", expected, actual);
   }

+ 28 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java

@@ -30,7 +30,9 @@ import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.State;
 import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.log4j.Level;
+import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.server.ZooKeeperServer;
 import org.junit.Test;
 import org.mockito.AdditionalMatchers;
@@ -256,4 +258,30 @@ public class TestActiveStandbyElectorRealZK extends ClientBaseWithFixes {
     Mockito.verify(cbs[1], Mockito.never()).becomeActive();
     checkFatalsAndReset();
   }
+
+  /**
+   * Test to verify that proper ZooKeeper ACLs can be updated on
+   * ActiveStandbyElector's parent znode.
+   */
+  @Test(timeout = 15000)
+  public void testSetZooKeeperACLsOnParentZnodeName()
+      throws Exception {
+    ActiveStandbyElectorCallback cb =
+        Mockito.mock(ActiveStandbyElectorCallback.class);
+    ActiveStandbyElector elector =
+        new ActiveStandbyElector(hostPort, 5000, PARENT_DIR,
+            Ids.READ_ACL_UNSAFE, Collections.<ZKAuthInfo>emptyList(), cb,
+            CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT);
+
+    // Simulate the case by pre-creating znode 'parentZnodeName'. Then updates
+    // znode's data so that data version will be increased to 1. Here znode's
+    // aversion is 0.
+    ZooKeeper otherClient = createClient();
+    otherClient.create(PARENT_DIR, "sample1".getBytes(), Ids.OPEN_ACL_UNSAFE,
+        CreateMode.PERSISTENT);
+    otherClient.setData(PARENT_DIR, "sample2".getBytes(), -1);
+    otherClient.close();
+
+    elector.ensureParentZNode();
+  }
 }
 }
+ 11 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java

@@ -18,11 +18,13 @@
 package org.apache.hadoop.io.erasurecode;
 package org.apache.hadoop.io.erasurecode;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawDecoder;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSLegacyRawEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
     // should return default raw coder of rs codec
     // should return default raw coder of rs codec
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(
         conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
-    Assert.assertTrue(encoder instanceof RSRawEncoder);
     RawErasureDecoder decoder = CodecUtil.createRawDecoder(
         conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions);
-    Assert.assertTrue(decoder instanceof RSRawDecoder);
+    if (ErasureCodeNative.isNativeCodeLoaded()) {
+      Assert.assertTrue(encoder instanceof NativeRSRawEncoder);
+      Assert.assertTrue(decoder instanceof NativeRSRawDecoder);
+    } else {
+      Assert.assertTrue(encoder instanceof RSRawEncoder);
+      Assert.assertTrue(decoder instanceof RSRawDecoder);
+    }
 
     // should return default raw coder of rs-legacy codec
     encoder = CodecUtil.createRawEncoder(conf,
+ 49 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/MetricsTestHelper.java

@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A helper class that can provide test cases access to package-private
+ * methods.
+ */
+public final class MetricsTestHelper {
+  public static final Logger LOG =
+      LoggerFactory.getLogger(MetricsTestHelper.class);
+
+  private MetricsTestHelper() {
+    //not called
+  }
+
+  /**
+   * Replace the rolling averages windows for a
+   * {@link MutableRollingAverages} metric.
+   *
+   */
+  public static void replaceRollingAveragesScheduler(
+      MutableRollingAverages mutableRollingAverages,
+      int numWindows, long interval, TimeUnit timeUnit) {
+    mutableRollingAverages.replaceScheduledTask(
+        numWindows, interval, timeUnit);
+  }
+}

+ 83 - 15
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestRollingAverages.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableRollingAverages.java

@@ -17,23 +17,30 @@
  */
 package org.apache.hadoop.metrics2.lib;
 
-import static org.apache.hadoop.metrics2.lib.Interns.info;
-import static org.apache.hadoop.test.MetricsAsserts.mockMetricsRecordBuilder;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Matchers.anyDouble;
-import static org.mockito.Matchers.eq;
-
+import com.google.common.base.Supplier;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.apache.hadoop.util.Time;
+import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.metrics2.lib.Interns.info;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+import static org.mockito.Matchers.anyDouble;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.*;
+
 /**
  * This class tests various cases of the algorithms implemented in
- * {@link RollingAverages}.
+ * {@link MutableRollingAverages}.
  */
-public class TestRollingAverages {
+public class TestMutableRollingAverages {
+
   /**
    * Tests if the results are correct if no samples are inserted, dry run of
    * empty roll over.
@@ -42,8 +49,9 @@ public class TestRollingAverages {
   public void testRollingAveragesEmptyRollover() throws Exception {
     final MetricsRecordBuilder rb = mockMetricsRecordBuilder();
     /* 5s interval and 2 windows */
-    try (RollingAverages rollingAverages =
-             new RollingAverages(5000, 2)) {
+    try (MutableRollingAverages rollingAverages =
+             new MutableRollingAverages("Time")) {
+      rollingAverages.replaceScheduledTask(2, 5, TimeUnit.SECONDS);
       /* Check it initially */
       rollingAverages.snapshot(rb, true);
       verify(rb, never()).addGauge(
@@ -78,9 +86,9 @@ public class TestRollingAverages {
     final int windowSizeMs = 5000; // 5s roll over interval
     final int numWindows = 2;
     final int numOpsPerIteration = 1000;
-    try (RollingAverages rollingAverages = new RollingAverages(windowSizeMs,
-        numWindows)) {
-
+    try (MutableRollingAverages rollingAverages =
+             new MutableRollingAverages("Time")) {
+      rollingAverages.replaceScheduledTask(2, 5000, TimeUnit.MILLISECONDS);
       /* Push values for three intervals */
       final long start = Time.monotonicNow();
       for (int i = 1; i <= 3; i++) {
@@ -121,4 +129,64 @@ public class TestRollingAverages {
       }
     }
   }
+
+  /**
+   * Test that MutableRollingAverages gives expected results after
+   * initialization.
+   * @throws Exception
+   */
+  @Test(timeout = 30000)
+  public void testMutableRollingAveragesMetric() throws Exception {
+    DummyTestMetric testMetric = new DummyTestMetric();
+    testMetric.create();
+
+    testMetric.add("metric1", 100);
+    testMetric.add("metric1", 900);
+    testMetric.add("metric2", 1000);
+    testMetric.add("metric2", 1000);
+
+    GenericTestUtils.waitFor(new Supplier<Boolean>() {
+      @Override
+      public Boolean get() {
+        testMetric.collectThreadLocalStates();
+        return testMetric.getStats().size() > 0;
+      }
+    }, 500, 5000);
+
+    MetricsRecordBuilder rb = getMetrics(DummyTestMetric.METRIC_NAME);
+
+    double metric1Avg = getDoubleGauge("[Metric1]RollingAvgTesting", rb);
+    double metric2Avg = getDoubleGauge("[Metric2]RollingAvgTesting", rb);
+    Assert.assertTrue("The rolling average of metric1 is not as expected",
+        metric1Avg == 500.0);
+    Assert.assertTrue("The rolling average of metric2 is not as expected",
+        metric2Avg == 1000.0);
+
+  }
+
+  class DummyTestMetric {
+    @Metric (valueName = "testing")
+    private MutableRollingAverages rollingAverages;
+
+    static final String METRIC_NAME = "RollingAveragesTestMetric";
+
+    protected void create() {
+      DefaultMetricsSystem.instance().register(METRIC_NAME,
+          "mutable rolling averages test", this);
+      rollingAverages.replaceScheduledTask(10, 1000, TimeUnit.MILLISECONDS);
+    }
+
+    void add(String name, long latency) {
+      rollingAverages.add(name, latency);
+    }
+
+    void collectThreadLocalStates() {
+      rollingAverages.collectThreadLocalStates();
+    }
+
+    Map<String, Double> getStats() {
+      return rollingAverages.getStats(0);
+    }
+
+  }
 }
 }
+ 63 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java

@@ -80,10 +80,12 @@ public class TestLdapGroupsMapping extends TestLdapGroupsMappingBase {
   private static final byte[] AUTHENTICATE_SUCCESS_MSG =
       {48, 12, 2, 1, 1, 97, 7, 10, 1, 0, 4, 0, 4, 0};
 
+  private final String userDN = "CN=some_user,DC=test,DC=com";
+
   @Before
   public void setupMocks() throws NamingException {
     when(getUserSearchResult().getNameInNamespace()).
-        thenReturn("CN=some_user,DC=test,DC=com");
+        thenReturn(userDN);
   }
 
   @Test
@@ -96,6 +98,66 @@ public class TestLdapGroupsMapping extends TestLdapGroupsMappingBase {
     doTestGetGroups(Arrays.asList(getTestGroups()), 2);
   }
 
+  @Test
+  public void testGetGroupsWithDifferentBaseDNs() throws Exception {
+    Configuration conf = new Configuration();
+    // Set this, so we don't throw an exception
+    conf.set(LdapGroupsMapping.LDAP_URL_KEY, "ldap://test");
+    String userBaseDN = "ou=Users,dc=xxx,dc=com ";
+    String groupBaseDN = " ou=Groups,dc=xxx,dc=com";
+    conf.set(LdapGroupsMapping.USER_BASE_DN_KEY, userBaseDN);
+    conf.set(LdapGroupsMapping.GROUP_BASE_DN_KEY, groupBaseDN);
+
+    doTestGetGroupsWithBaseDN(conf, userBaseDN.trim(), groupBaseDN.trim());
+  }
+
+  @Test
+  public void testGetGroupsWithDefaultBaseDN() throws Exception {
+    Configuration conf = new Configuration();
+    // Set this, so we don't throw an exception
+    conf.set(LdapGroupsMapping.LDAP_URL_KEY, "ldap://test");
+    String baseDN = " dc=xxx,dc=com ";
+    conf.set(LdapGroupsMapping.BASE_DN_KEY, baseDN);
+    doTestGetGroupsWithBaseDN(conf, baseDN.trim(), baseDN.trim());
+  }
+
+  /**
+   * Helper method to do the LDAP getGroups operation using given user base DN
+   * and group base DN.
+   * @param conf The created configuration
+   * @param userBaseDN user base DN
+   * @param groupBaseDN group base DN
+   * @throws NamingException if error happens when getting groups
+   */
+  private void doTestGetGroupsWithBaseDN(Configuration conf, String userBaseDN,
+      String groupBaseDN) throws NamingException {
+    final LdapGroupsMapping groupsMapping = getGroupsMapping();
+    groupsMapping.setConf(conf);
+
+    final String userName = "some_user";
+
+    // The search functionality of the mock context is reused, so we will
+    // return the user NamingEnumeration first, and then the group
+    when(getContext().search(anyString(), anyString(), any(Object[].class),
+        any(SearchControls.class)))
+        .thenReturn(getUserNames(), getGroupNames());
+
+    List<String> groups = groupsMapping.getGroups(userName);
+    Assert.assertEquals(Arrays.asList(getTestGroups()), groups);
+
+    // We should have searched for the username and groups with default base dn
+    verify(getContext(), times(1)).search(userBaseDN,
+        LdapGroupsMapping.USER_SEARCH_FILTER_DEFAULT,
+        new Object[]{userName},
+        LdapGroupsMapping.SEARCH_CONTROLS);
+
+    verify(getContext(), times(1)).search(groupBaseDN,
+        "(&" + LdapGroupsMapping.GROUP_SEARCH_FILTER_DEFAULT + "(" +
+            LdapGroupsMapping.GROUP_MEMBERSHIP_ATTR_DEFAULT + "={0}))",
+        new Object[]{userDN},
+        LdapGroupsMapping.SEARCH_CONTROLS);
+  }
+
   @Test
   public void testGetGroupsWithHierarchy() throws IOException, NamingException {
     // The search functionality of the mock context is reused, so we will

+ 65 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceOperations.java

@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.service;
+
+import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.PrintWriter;
+
+import static org.apache.hadoop.test.GenericTestUtils.LogCapturer.captureLogs;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Test miscellaneous service operations through mocked failures.
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestServiceOperations {
+
+  @Mock
+  private Service service;
+
+  @Mock
+  private RuntimeException e;
+
+  @Test
+  public void testStopQuietlyWhenServiceStopThrowsException() throws Exception {
+    Logger logger = LoggerFactory.getLogger(TestServiceOperations.class);
+    LogCapturer logCapturer = captureLogs(logger);
+    doThrow(e).when(service).stop();
+
+    ServiceOperations.stopQuietly(logger, service);
+
+    assertThat(logCapturer.getOutput(),
+        containsString("When stopping the service " + service.getName()
+            + " : " + e));
+    verify(e, times(1)).printStackTrace(Mockito.any(PrintWriter.class));
+  }
+
+}

Some files were not shown because too many files changed in this diff