
merge changes from trunk to HDFS-4949 branch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1524865 13f79535-47bb-0310-9956-ffa450edef68
Andrew Wang
commit aae86e4f3f
100 changed files with 2864 additions and 815 deletions (each entry below lists additions, then deletions, then the file path)
  1. 48 0
      hadoop-assemblies/src/main/resources/assemblies/hadoop-hdfs-nfs-dist.xml
  2. 48 0
      hadoop-assemblies/src/main/resources/assemblies/hadoop-nfs-dist.xml
  3. 5 56
      hadoop-common-project/hadoop-auth/pom.xml
  4. 7 16
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
  5. 12 14
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java
  6. 26 21
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
  7. 43 17
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java
  8. 41 20
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java
  9. 11 6
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java
  10. 64 49
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
  11. 39 33
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java
  12. 45 31
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
  13. 18 11
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
  14. 15 4
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java
  15. 6 7
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
  16. 21 15
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestSigner.java
  17. 0 28
      hadoop-common-project/hadoop-auth/src/test/resources/krb5.conf
  18. 69 1
      hadoop-common-project/hadoop-common/CHANGES.txt
  19. 1 5
      hadoop-common-project/hadoop-common/pom.xml
  20. 34 1
      hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
  21. 17 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  22. 11 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
  23. 20 10
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
  24. 12 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
  25. 4 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
  26. 8 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
  27. 15 11
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
  28. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java
  29. 1 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
  30. 6 5
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
  31. 94 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
  32. 39 19
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLogAppender.java
  33. 126 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
  34. 62 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java
  35. 1 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
  36. 3 3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
  37. 21 7
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
  38. 9 1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
  39. 1 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
  40. 19 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java
  41. 34 4
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
  42. 20 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
  43. 14 2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
  44. 3 0
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
  45. 19 12
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
  46. 2 2
      hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
  47. 26 15
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java
  48. 17 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
  49. 20 8
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
  50. 15 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
  51. 37 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
  52. 27 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
  53. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java
  54. 1 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java
  55. 16 6
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
  56. 47 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
  57. 37 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLogAppender.java
  58. 5 4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
  59. 22 0
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java
  60. 31 29
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
  61. 4 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
  62. 6 1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
  63. 17 16
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java
  64. 1 1
      hadoop-common-project/hadoop-minikdc/pom.xml
  65. 0 121
      hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytabEncoder.java
  66. 15 3
      hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
  67. 7 3
      hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
  68. 46 0
      hadoop-common-project/hadoop-nfs/pom.xml
  69. 20 6
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
  70. 24 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/AccessPrivilege.java
  71. 388 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
  72. 2 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java
  73. 19 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java
  74. 63 44
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java
  75. 10 10
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java
  76. 9 9
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java
  77. 4 2
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java
  78. 12 9
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
  79. 1 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
  80. 1 1
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java
  81. 53 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java
  82. 15 25
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsGSS.java
  83. 43 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsNone.java
  84. 114 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java
  85. 10 17
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/RpcAuthInfo.java
  86. 63 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java
  87. 59 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SysSecurityHandler.java
  88. 49 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
  89. 41 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierGSS.java
  90. 41 0
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierNone.java
  91. 10 4
      hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java
  92. 191 0
      hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java
  93. 3 2
      hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java
  94. 7 3
      hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java
  95. 19 18
      hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestCredentialsSys.java
  96. 3 13
      hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestRpcAuthInfo.java
  97. 2 0
      hadoop-dist/pom.xml
  98. 44 1
      hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml
  99. 22 2
      hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java
  100. 7 2
      hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/Nfs3.java

+ 48 - 0
hadoop-assemblies/src/main/resources/assemblies/hadoop-hdfs-nfs-dist.xml

@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly>
+  <id>hadoop-hdfs-nfs-dist</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>/share/hadoop/hdfs</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+
+  <dependencySets>
+    <dependencySet>
+      <useProjectArtifact>false</useProjectArtifact>
+      <outputDirectory>/share/hadoop/hdfs/lib</outputDirectory>
+      <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
+      <excludes>
+        <exclude>org.apache.hadoop:hadoop-common</exclude>
+        <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
+        <!-- use slf4j from common to avoid multiple binding warnings -->
+        <exclude>org.slf4j:slf4j-api</exclude>
+        <exclude>org.slf4j:slf4j-log4j12</exclude>
+        <exclude>org.hsqldb:hsqldb</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+
+</assembly>
+

+ 48 - 0
hadoop-assemblies/src/main/resources/assemblies/hadoop-nfs-dist.xml

@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly>
+  <id>hadoop-nfs-dist</id>
+  <formats>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>/share/hadoop/common</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+
+  <dependencySets>
+    <dependencySet>
+      <useProjectArtifact>false</useProjectArtifact>
+      <outputDirectory>/share/hadoop/common/lib</outputDirectory>
+      <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
+      <excludes>
+        <exclude>org.apache.hadoop:hadoop-common</exclude>
+        <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
+        <!-- use slf4j from common to avoid multiple binding warnings -->
+        <exclude>org.slf4j:slf4j-api</exclude>
+        <exclude>org.slf4j:slf4j-log4j12</exclude>
+        <exclude>org.hsqldb:hsqldb</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
+
+</assembly>
+

+ 5 - 56
hadoop-common-project/hadoop-auth/pom.xml

@@ -33,7 +33,6 @@
 
   <properties>
     <maven.build.timestamp.format>yyyyMMdd</maven.build.timestamp.format>
-    <kerberos.realm>LOCALHOST</kerberos.realm>
   </properties>
 
   <dependencies>
@@ -83,38 +82,15 @@
       <artifactId>slf4j-log4j12</artifactId>
       <scope>runtime</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
-    <testResources>
-      <testResource>
-        <directory>${basedir}/src/test/resources</directory>
-        <filtering>true</filtering>
-        <includes>
-          <include>krb5.conf</include>
-        </includes>
-      </testResource>
-    </testResources>
     <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>always</forkMode>
-          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-          <systemPropertyVariables>
-            <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
-            <kerberos.realm>${kerberos.realm}</kerberos.realm>
-          </systemPropertyVariables>
-          <excludes>
-            <exclude>**/${test.exclude}.java</exclude>
-            <exclude>${test.exclude.pattern}</exclude>
-            <exclude>**/TestKerberosAuth*.java</exclude>
-            <exclude>**/TestAltKerberosAuth*.java</exclude>
-            <exclude>**/Test*$*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
@@ -134,33 +110,6 @@
   </build>
 
   <profiles>
-    <profile>
-      <id>testKerberos</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-surefire-plugin</artifactId>
-            <configuration>
-              <forkMode>always</forkMode>
-              <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-              <systemPropertyVariables>
-                <java.security.krb5.conf>${project.build.directory}/test-classes/krb5.conf</java.security.krb5.conf>
-                <kerberos.realm>${kerberos.realm}</kerberos.realm>
-              </systemPropertyVariables>
-              <excludes>
-                <exclude>**/${test.exclude}.java</exclude>
-                <exclude>${test.exclude.pattern}</exclude>
-                <exclude>**/Test*$*.java</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <profile>
       <id>docs</id>
       <activation>

+ 7 - 16
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java

@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.security.authentication;
 
-
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -26,6 +25,7 @@ import java.io.File;
 import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
+import java.util.UUID;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -36,32 +36,23 @@ import java.util.concurrent.Callable;
  * Test helper class for Java Kerberos setup.
  */
 public class KerberosTestUtils {
-  private static final String PREFIX = "hadoop-auth.test.";
-
-  public static final String REALM = PREFIX + "kerberos.realm";
-
-  public static final String CLIENT_PRINCIPAL = PREFIX + "kerberos.client.principal";
-
-  public static final String SERVER_PRINCIPAL = PREFIX + "kerberos.server.principal";
-
-  public static final String KEYTAB_FILE = PREFIX + "kerberos.keytab.file";
+  private static String keytabFile = new File(System.getProperty("test.dir", "target"),
+          UUID.randomUUID().toString()).toString();
 
   public static String getRealm() {
-    return System.getProperty(REALM, "LOCALHOST");
+    return "EXAMPLE.COM";
   }
 
   public static String getClientPrincipal() {
-    return System.getProperty(CLIENT_PRINCIPAL, "client") + "@" + getRealm();
+    return "client@EXAMPLE.COM";
   }
 
   public static String getServerPrincipal() {
-    return System.getProperty(SERVER_PRINCIPAL, "HTTP/localhost") + "@" + getRealm();
+    return "HTTP/localhost@EXAMPLE.COM";
   }
 
   public static String getKeytabFile() {
-    String keytabFile =
-      new File(System.getProperty("user.home"), System.getProperty("user.name") + ".keytab").toString();
-    return System.getProperty(KEYTAB_FILE, keytabFile);
+    return keytabFile;
   }
 
   private static class KerberosConfiguration extends Configuration {

+ 12 - 14
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/AuthenticatorTestCase.java

@@ -2,9 +2,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -13,10 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.client;
 
-import junit.framework.Assert;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-import junit.framework.TestCase;
-import org.mockito.Mockito;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.FilterHolder;
@@ -27,19 +24,20 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
 import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.ServerSocket;
 import java.net.URL;
 import java.util.Properties;
+import org.junit.Assert;
 
-public abstract class AuthenticatorTestCase extends TestCase {
+public class AuthenticatorTestCase {
   private Server server;
   private String host = null;
   private int port = -1;
@@ -151,18 +149,18 @@ public abstract class AuthenticatorTestCase extends TestCase {
         writer.write(POST);
         writer.close();
       }
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
       if (doPost) {
         BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
         String echo = reader.readLine();
-        assertEquals(POST, echo);
-        assertNull(reader.readLine());
+        Assert.assertEquals(POST, echo);
+        Assert.assertNull(reader.readLine());
       }
       aUrl = new AuthenticatedURL();
       conn = aUrl.openConnection(url, token);
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-      assertEquals(tokenStr, token.toString());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(tokenStr, token.toString());
     } finally {
       stop();
    }
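
The recurring change across the hadoop-auth test diffs in this merge is a migration from JUnit 3 to JUnit 4: classes no longer extend junit.framework.TestCase, each test method gets an explicit org.junit.Test annotation (often with a timeout), and the inherited assertXxx/fail calls become explicit org.junit.Assert calls. A minimal before/after sketch of that pattern, using a hypothetical test class name:

// JUnit 3 style (before): assertions are inherited from junit.framework.TestCase
// and test methods are discovered by their "test" name prefix.
//
//   public class TokenSmokeTest extends junit.framework.TestCase {
//     public void testIsSet() {
//       assertFalse(new AuthenticatedURL.Token().isSet());
//     }
//   }

// JUnit 4 style (after): a plain class with an explicit @Test annotation,
// explicit Assert calls, and an optional per-test timeout.
package org.apache.hadoop.security.authentication.client;

import org.junit.Assert;
import org.junit.Test;

public class TokenSmokeTest {                 // hypothetical class, not part of the patch
  @Test(timeout = 60000)
  public void testIsSet() {
    // AuthenticatedURL.Token is the same class exercised by TestAuthenticatedURL below.
    Assert.assertFalse(new AuthenticatedURL.Token().isSet());
  }
}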

+ 26 - 21
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java

@@ -13,8 +13,8 @@
  */
 package org.apache.hadoop.security.authentication.client;
 
-import junit.framework.Assert;
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;
 
 import java.net.HttpURLConnection;
@@ -24,46 +24,48 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-public class TestAuthenticatedURL extends TestCase {
+public class TestAuthenticatedURL {
 
+  @Test
   public void testToken() throws Exception {
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-    assertFalse(token.isSet());
+    Assert.assertFalse(token.isSet());
     token = new AuthenticatedURL.Token("foo");
-    assertTrue(token.isSet());
-    assertEquals("foo", token.toString());
+    Assert.assertTrue(token.isSet());
+    Assert.assertEquals("foo", token.toString());
 
     AuthenticatedURL.Token token1 = new AuthenticatedURL.Token();
     AuthenticatedURL.Token token2 = new AuthenticatedURL.Token();
-    assertEquals(token1.hashCode(), token2.hashCode());
-    assertTrue(token1.equals(token2));
+    Assert.assertEquals(token1.hashCode(), token2.hashCode());
+    Assert.assertTrue(token1.equals(token2));
 
     token1 = new AuthenticatedURL.Token();
     token2 = new AuthenticatedURL.Token("foo");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));
 
     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token();
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));
 
     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token("foo");
-    assertEquals(token1.hashCode(), token2.hashCode());
-    assertTrue(token1.equals(token2));
+    Assert.assertEquals(token1.hashCode(), token2.hashCode());
+    Assert.assertTrue(token1.equals(token2));
 
     token1 = new AuthenticatedURL.Token("bar");
     token2 = new AuthenticatedURL.Token("foo");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));
 
     token1 = new AuthenticatedURL.Token("foo");
     token2 = new AuthenticatedURL.Token("bar");
-    assertNotSame(token1.hashCode(), token2.hashCode());
-    assertFalse(token1.equals(token2));
+    Assert.assertNotSame(token1.hashCode(), token2.hashCode());
+    Assert.assertFalse(token1.equals(token2));
   }
 
+  @Test
   public void testInjectToken() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
@@ -72,6 +74,7 @@ public class TestAuthenticatedURL extends TestCase {
     Mockito.verify(conn).addRequestProperty(Mockito.eq("Cookie"), Mockito.anyString());
   }
 
+  @Test
   public void testExtractTokenOK() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
 
@@ -87,9 +90,10 @@ public class TestAuthenticatedURL extends TestCase {
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
     AuthenticatedURL.extractToken(conn, token);
 
-    assertEquals(tokenStr, token.toString());
+    Assert.assertEquals(tokenStr, token.toString());
   }
 
+  @Test
   public void testExtractTokenFail() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
 
@@ -106,15 +110,16 @@ public class TestAuthenticatedURL extends TestCase {
     token.set("bar");
     try {
       AuthenticatedURL.extractToken(conn, token);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
       // Expected
       Assert.assertFalse(token.isSet());
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
   }
 
+  @Test
   public void testConnectionConfigurator() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
     Mockito.when(conn.getResponseCode()).

+ 43 - 17
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestKerberosAuthenticator.java

@@ -13,17 +13,33 @@
  */
 package org.apache.hadoop.security.authentication.client;
 
+import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 
+import java.io.File;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
-public class TestKerberosAuthenticator extends AuthenticatorTestCase {
+public class TestKerberosAuthenticator extends KerberosSecurityTestcase {
+
+  @Before
+  public void setup() throws Exception {
+    // create keytab
+    File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+    String clientPrincipal = KerberosTestUtils.getClientPrincipal();
+    String serverPrincipal = KerberosTestUtils.getServerPrincipal();
+    clientPrincipal = clientPrincipal.substring(0, clientPrincipal.lastIndexOf("@"));
+    serverPrincipal = serverPrincipal.substring(0, serverPrincipal.lastIndexOf("@"));
+    getKdc().createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
+  }
 
   private Properties getAuthenticationHandlerConfiguration() {
     Properties props = new Properties();
@@ -35,57 +51,67 @@ public class TestKerberosAuthenticator extends AuthenticatorTestCase {
     return props;
   }
 
+  @Test(timeout=60000)
   public void testFallbacktoPseudoAuthenticator() throws Exception {
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
     Properties props = new Properties();
     props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
     props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
-    setAuthenticationHandlerConfig(props);
-    _testAuthentication(new KerberosAuthenticator(), false);
+    auth.setAuthenticationHandlerConfig(props);
+    auth._testAuthentication(new KerberosAuthenticator(), false);
   }
 
+  @Test(timeout=60000)
   public void testFallbacktoPseudoAuthenticatorAnonymous() throws Exception {
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
     Properties props = new Properties();
     props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
     props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
-    setAuthenticationHandlerConfig(props);
-    _testAuthentication(new KerberosAuthenticator(), false);
+    auth.setAuthenticationHandlerConfig(props);
+    auth._testAuthentication(new KerberosAuthenticator(), false);
  }
 
+  @Test(timeout=60000)
   public void testNotAuthenticated() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
-      assertTrue(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE) != null);
+      Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+      Assert.assertTrue(conn.getHeaderField(KerberosAuthenticator.WWW_AUTHENTICATE) != null);
     } finally {
-      stop();
+      auth.stop();
    }
   }
 
-
+  @Test(timeout=60000)
   public void testAuthentication() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    final AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration());
     KerberosTestUtils.doAsClient(new Callable<Void>() {
       @Override
       public Void call() throws Exception {
-        _testAuthentication(new KerberosAuthenticator(), false);
+        auth._testAuthentication(new KerberosAuthenticator(), false);
         return null;
       }
     });
   }
 
+  @Test(timeout=60000)
   public void testAuthenticationPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration());
+    final AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration());
     KerberosTestUtils.doAsClient(new Callable<Void>() {
       @Override
      public Void call() throws Exception {
-        _testAuthentication(new KerberosAuthenticator(), true);
+        auth._testAuthentication(new KerberosAuthenticator(), true);
         return null;
       }
     });
   }
-
 }
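
TestKerberosAuthenticator now extends KerberosSecurityTestcase from the new hadoop-minikdc test dependency, so an embedded KDC is available to the tests and the @Before method above only has to materialize a keytab for the client and HTTP/localhost principals via getKdc().createPrincipal(...). For readers unfamiliar with MiniKdc, here is a minimal standalone sketch of the same facility; the work directory and keytab name are illustrative assumptions, and the defaults come from MiniKdc.createConf() (realm EXAMPLE.COM at the time of this merge):

import java.io.File;
import java.util.Properties;

import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {                          // hypothetical driver, not part of the patch
  public static void main(String[] args) throws Exception {
    Properties conf = MiniKdc.createConf();           // default KDC settings
    File workDir = new File("target/minikdc-work");   // illustrative scratch directory
    workDir.mkdirs();

    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start();                                      // boots an in-process KDC
    try {
      // Write both principals into one keytab, as the @Before method above does.
      File keytab = new File(workDir, "test.keytab");
      kdc.createPrincipal(keytab, "client", "HTTP/localhost");
      System.out.println("KDC realm: " + kdc.getRealm());
    } finally {
      kdc.stop();
    }
  }
}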

+ 41 - 20
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestPseudoAuthenticator.java

@@ -15,12 +15,14 @@ package org.apache.hadoop.security.authentication.client;
 
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.junit.Assert;
+import org.junit.Test;
 
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.util.Properties;
 
-public class TestPseudoAuthenticator extends AuthenticatorTestCase {
+public class TestPseudoAuthenticator {
 
   private Properties getAuthenticationHandlerConfiguration(boolean anonymousAllowed) {
     Properties props = new Properties();
@@ -29,55 +31,74 @@ public class TestPseudoAuthenticator extends AuthenticatorTestCase {
     return props;
   }
 
+  @Test
   public void testGetUserName() throws Exception {
     PseudoAuthenticator authenticator = new PseudoAuthenticator();
-    assertEquals(System.getProperty("user.name"), authenticator.getUserName());
+    Assert.assertEquals(System.getProperty("user.name"), authenticator.getUserName());
   }
 
+  @Test
   public void testAnonymousAllowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(true));
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
     } finally {
-      stop();
+      auth.stop();
     }
   }
 
+  @Test
   public void testAnonymousDisallowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    start();
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(false));
+    auth.start();
     try {
-      URL url = new URL(getBaseURL());
+      URL url = new URL(auth.getBaseURL());
       HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       conn.connect();
-      assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+      Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
     } finally {
-      stop();
+      auth.stop();
     }
   }
 
+  @Test
   public void testAuthenticationAnonymousAllowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    _testAuthentication(new PseudoAuthenticator(), false);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(true));
+    auth._testAuthentication(new PseudoAuthenticator(), false);
   }
 
+  @Test
   public void testAuthenticationAnonymousDisallowed() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    _testAuthentication(new PseudoAuthenticator(), false);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(false));
+    auth._testAuthentication(new PseudoAuthenticator(), false);
   }
 
+  @Test
   public void testAuthenticationAnonymousAllowedWithPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(true));
-    _testAuthentication(new PseudoAuthenticator(), true);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(true));
+    auth._testAuthentication(new PseudoAuthenticator(), true);
  }
 
+  @Test
   public void testAuthenticationAnonymousDisallowedWithPost() throws Exception {
-    setAuthenticationHandlerConfig(getAuthenticationHandlerConfiguration(false));
-    _testAuthentication(new PseudoAuthenticator(), true);
+    AuthenticatorTestCase auth = new AuthenticatorTestCase();
+    auth.setAuthenticationHandlerConfig(
+            getAuthenticationHandlerConfiguration(false));
+    auth._testAuthentication(new PseudoAuthenticator(), true);
   }
 
 }
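
A second recurring pattern in these test diffs: AuthenticatorTestCase is no longer a base class but a helper that each JUnit 4 test instantiates and configures. A condensed sketch of that composition style, with a hypothetical test class name; per the AuthenticatorTestCase diff above, _testAuthentication shuts the embedded server down in its finally block:

package org.apache.hadoop.security.authentication.client;

import java.util.Properties;

import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
import org.junit.Test;

public class PseudoAuthCompositionSketch {            // hypothetical class, not part of the patch

  @Test
  public void testAnonymousSimpleAuth() throws Exception {
    AuthenticatorTestCase auth = new AuthenticatorTestCase();   // composed helper, no inheritance

    Properties props = new Properties();
    props.setProperty(AuthenticationFilter.AUTH_TYPE, "simple");
    props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
    auth.setAuthenticationHandlerConfig(props);

    // Drives a full request cycle against the helper's embedded Jetty server;
    // the helper stops the server when it is done (see its finally block above).
    auth._testAuthentication(new PseudoAuthenticator(), false);
  }
}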

+ 11 - 6
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAltKerberosAuthenticationHandler.java

@@ -18,6 +18,8 @@ import java.util.Properties;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;
 
 public class TestAltKerberosAuthenticationHandler
@@ -45,6 +47,7 @@ public class TestAltKerberosAuthenticationHandler
     return AltKerberosAuthenticationHandler.TYPE;
   }
 
+  @Test(timeout=60000)
   public void testAlternateAuthenticationAsBrowser() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -54,11 +57,12 @@ public class TestAltKerberosAuthenticationHandler
     Mockito.when(request.getHeader("User-Agent")).thenReturn("Some Browser");
 
     AuthenticationToken token = handler.authenticate(request, response);
-    assertEquals("A", token.getUserName());
-    assertEquals("B", token.getName());
-    assertEquals(getExpectedType(), token.getType());
+    Assert.assertEquals("A", token.getUserName());
+    Assert.assertEquals("B", token.getName());
+    Assert.assertEquals(getExpectedType(), token.getType());
   }
 
+  @Test(timeout=60000)
   public void testNonDefaultNonBrowserUserAgentAsBrowser() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -81,11 +85,12 @@ public class TestAltKerberosAuthenticationHandler
     Mockito.when(request.getHeader("User-Agent")).thenReturn("blah");
     // Should use alt authentication
     AuthenticationToken token = handler.authenticate(request, response);
-    assertEquals("A", token.getUserName());
-    assertEquals("B", token.getName());
-    assertEquals(getExpectedType(), token.getType());
+    Assert.assertEquals("A", token.getUserName());
+    Assert.assertEquals("B", token.getName());
+    Assert.assertEquals(getExpectedType(), token.getType());
   }
 
+  @Test(timeout=60000)
   public void testNonDefaultNonBrowserUserAgentAsNonBrowser() throws Exception {
     if (handler != null) {
       handler.destroy();

+ 64 - 49
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java

@@ -16,7 +16,8 @@ package org.apache.hadoop.security.authentication.server;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.util.Signer;
 import org.apache.hadoop.security.authentication.util.Signer;
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 import org.mockito.stubbing.Answer;
@@ -34,8 +35,9 @@ import java.util.Arrays;
 import java.util.Properties;
 import java.util.Properties;
 import java.util.Vector;
 import java.util.Vector;
 
 
-public class TestAuthenticationFilter extends TestCase {
+public class TestAuthenticationFilter {
 
 
+  @Test
   public void testGetConfiguration() throws Exception {
   public void testGetConfiguration() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     AuthenticationFilter filter = new AuthenticationFilter();
     FilterConfig config = Mockito.mock(FilterConfig.class);
     FilterConfig config = Mockito.mock(FilterConfig.class);
@@ -43,27 +45,28 @@ public class TestAuthenticationFilter extends TestCase {
     Mockito.when(config.getInitParameter("a")).thenReturn("A");
     Mockito.when(config.getInitParameter("a")).thenReturn("A");
     Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
     Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("a")).elements());
     Properties props = filter.getConfiguration("", config);
     Properties props = filter.getConfiguration("", config);
-    assertEquals("A", props.getProperty("a"));
+    Assert.assertEquals("A", props.getProperty("a"));
 
 
     config = Mockito.mock(FilterConfig.class);
     config = Mockito.mock(FilterConfig.class);
     Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
     Mockito.when(config.getInitParameter(AuthenticationFilter.CONFIG_PREFIX)).thenReturn("foo");
     Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
     Mockito.when(config.getInitParameter("foo.a")).thenReturn("A");
     Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
     Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>(Arrays.asList("foo.a")).elements());
     props = filter.getConfiguration("foo.", config);
     props = filter.getConfiguration("foo.", config);
-    assertEquals("A", props.getProperty("a"));
+    Assert.assertEquals("A", props.getProperty("a"));
   }
   }
 
 
+  @Test
   public void testInitEmpty() throws Exception {
   public void testInitEmpty() throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
     try {
       FilterConfig config = Mockito.mock(FilterConfig.class);
       FilterConfig config = Mockito.mock(FilterConfig.class);
       Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
       Mockito.when(config.getInitParameterNames()).thenReturn(new Vector<String>().elements());
       filter.init(config);
       filter.init(config);
-      fail();
+      Assert.fail();
     } catch (ServletException ex) {
     } catch (ServletException ex) {
       // Expected
       // Expected
     } catch (Exception ex) {
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     } finally {
     } finally {
       filter.destroy();
       filter.destroy();
     }
     }
@@ -126,6 +129,7 @@ public class TestAuthenticationFilter extends TestCase {
     }
     }
   }
   }
 
 
+  @Test
   public void testInit() throws Exception {
   public void testInit() throws Exception {
 
 
     // minimal configuration & simple auth handler (Pseudo)
     // minimal configuration & simple auth handler (Pseudo)
@@ -138,11 +142,11 @@ public class TestAuthenticationFilter extends TestCase {
         new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
         new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                  AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
                                  AuthenticationFilter.AUTH_TOKEN_VALIDITY)).elements());
       filter.init(config);
       filter.init(config);
-      assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
-      assertTrue(filter.isRandomSecret());
-      assertNull(filter.getCookieDomain());
-      assertNull(filter.getCookiePath());
-      assertEquals(1000, filter.getValidity());
+      Assert.assertEquals(PseudoAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
+      Assert.assertTrue(filter.isRandomSecret());
+      Assert.assertNull(filter.getCookieDomain());
+      Assert.assertNull(filter.getCookiePath());
+      Assert.assertEquals(1000, filter.getValidity());
     } finally {
     } finally {
       filter.destroy();
       filter.destroy();
     }
     }
@@ -157,7 +161,7 @@ public class TestAuthenticationFilter extends TestCase {
         new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
         new Vector<String>(Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                                  AuthenticationFilter.SIGNATURE_SECRET)).elements());
                                  AuthenticationFilter.SIGNATURE_SECRET)).elements());
       filter.init(config);
       filter.init(config);
-      assertFalse(filter.isRandomSecret());
+      Assert.assertFalse(filter.isRandomSecret());
     } finally {
     } finally {
       filter.destroy();
       filter.destroy();
     }
     }
@@ -174,13 +178,12 @@ public class TestAuthenticationFilter extends TestCase {
                                  AuthenticationFilter.COOKIE_DOMAIN,
                                  AuthenticationFilter.COOKIE_DOMAIN,
                                  AuthenticationFilter.COOKIE_PATH)).elements());
                                  AuthenticationFilter.COOKIE_PATH)).elements());
       filter.init(config);
       filter.init(config);
-      assertEquals(".foo.com", filter.getCookieDomain());
-      assertEquals("/bar", filter.getCookiePath());
+      Assert.assertEquals(".foo.com", filter.getCookieDomain());
+      Assert.assertEquals("/bar", filter.getCookiePath());
     } finally {
     } finally {
       filter.destroy();
       filter.destroy();
     }
     }
 
 
-
     // authentication handler lifecycle, and custom impl
     // authentication handler lifecycle, and custom impl
     DummyAuthenticationHandler.reset();
     DummyAuthenticationHandler.reset();
     filter = new AuthenticationFilter();
     filter = new AuthenticationFilter();
@@ -195,10 +198,10 @@ public class TestAuthenticationFilter extends TestCase {
           Arrays.asList(AuthenticationFilter.AUTH_TYPE,
           Arrays.asList(AuthenticationFilter.AUTH_TYPE,
                         "management.operation.return")).elements());
                         "management.operation.return")).elements());
       filter.init(config);
       filter.init(config);
-      assertTrue(DummyAuthenticationHandler.init);
+      Assert.assertTrue(DummyAuthenticationHandler.init);
     } finally {
     } finally {
       filter.destroy();
       filter.destroy();
-      assertTrue(DummyAuthenticationHandler.destroy);
+      Assert.assertTrue(DummyAuthenticationHandler.destroy);
     }
     }
 
 
     // kerberos auth handler
     // kerberos auth handler
@@ -212,11 +215,12 @@ public class TestAuthenticationFilter extends TestCase {
    } catch (ServletException ex) {
      // Expected
    } finally {
-      assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
+      Assert.assertEquals(KerberosAuthenticationHandler.class, filter.getAuthenticationHandler().getClass());
      filter.destroy();
    }
  }

+  @Test
  public void testGetRequestURL() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -235,12 +239,13 @@ public class TestAuthenticationFilter extends TestCase {
       Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
      Mockito.when(request.getQueryString()).thenReturn("a=A&b=B");
 
-      assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
+      Assert.assertEquals("http://foo:8080/bar?a=A&b=B", filter.getRequestURL(request));
    } finally {
      filter.destroy();
    }
  }

+  @Test
  public void testGetToken() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -268,12 +273,13 @@ public class TestAuthenticationFilter extends TestCase {
 
      AuthenticationToken newToken = filter.getToken(request);
 
-      assertEquals(token.toString(), newToken.toString());
+      Assert.assertEquals(token.toString(), newToken.toString());
    } finally {
      filter.destroy();
    }
  }

+  @Test
  public void testGetTokenExpired() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -300,17 +306,18 @@ public class TestAuthenticationFilter extends TestCase {
 
      try {
        filter.getToken(request);
-        fail();
+        Assert.fail();
      } catch (AuthenticationException ex) {
        // Expected
      } catch (Exception ex) {
-        fail();
+        Assert.fail();
      }
    } finally {
      filter.destroy();
    }
  }

+  @Test
  public void testGetTokenInvalidType() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -338,17 +345,18 @@ public class TestAuthenticationFilter extends TestCase {
 
      try {
        filter.getToken(request);
-        fail();
+        Assert.fail();
      } catch (AuthenticationException ex) {
        // Expected
      } catch (Exception ex) {
-        fail();
+        Assert.fail();
      }
    } finally {
      filter.destroy();
    }
  }

+  @Test
  public void testDoFilterNotAuthenticated() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -374,7 +382,7 @@ public class TestAuthenticationFilter extends TestCase {
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
-            fail();
+            Assert.fail();
            return null;
          }
        }
@@ -468,27 +476,27 @@ public class TestAuthenticationFilter extends TestCase {
        Mockito.verify(response, Mockito.never()).
          addCookie(Mockito.any(Cookie.class));
      } else {
-        assertNotNull(setCookie[0]);
-        assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
-        assertTrue(setCookie[0].getValue().contains("u="));
-        assertTrue(setCookie[0].getValue().contains("p="));
-        assertTrue(setCookie[0].getValue().contains("t="));
-        assertTrue(setCookie[0].getValue().contains("e="));
-        assertTrue(setCookie[0].getValue().contains("s="));
-        assertTrue(calledDoFilter[0]);
+        Assert.assertNotNull(setCookie[0]);
+        Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+        Assert.assertTrue(setCookie[0].getValue().contains("u="));
+        Assert.assertTrue(setCookie[0].getValue().contains("p="));
+        Assert.assertTrue(setCookie[0].getValue().contains("t="));
+        Assert.assertTrue(setCookie[0].getValue().contains("e="));
+        Assert.assertTrue(setCookie[0].getValue().contains("s="));
+        Assert.assertTrue(calledDoFilter[0]);
 
        Signer signer = new Signer("secret".getBytes());
        String value = signer.verifyAndExtract(setCookie[0].getValue());
        AuthenticationToken token = AuthenticationToken.parse(value);
-        assertEquals(System.currentTimeMillis() + 1000 * 1000,
+        Assert.assertEquals(System.currentTimeMillis() + 1000 * 1000,
                     token.getExpires(), 100);

        if (withDomainPath) {
-          assertEquals(".foo.com", setCookie[0].getDomain());
-          assertEquals("/bar", setCookie[0].getPath());
+          Assert.assertEquals(".foo.com", setCookie[0].getDomain());
+          Assert.assertEquals("/bar", setCookie[0].getPath());
         } else {
-          assertNull(setCookie[0].getDomain());
-          assertNull(setCookie[0].getPath());
+          Assert.assertNull(setCookie[0].getDomain());
+          Assert.assertNull(setCookie[0].getPath());
        }
      }
    } finally {
@@ -496,22 +504,27 @@ public class TestAuthenticationFilter extends TestCase {
    }
  }

+  @Test
  public void testDoFilterAuthentication() throws Exception {
    _testDoFilterAuthentication(false, false, false);
  }

+  @Test
  public void testDoFilterAuthenticationImmediateExpiration() throws Exception {
    _testDoFilterAuthentication(false, false, true);
  }

+  @Test
  public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
    _testDoFilterAuthentication(false, true, false);
  }

+  @Test
  public void testDoFilterAuthenticationWithDomainPath() throws Exception {
    _testDoFilterAuthentication(true, false, false);
  }

+  @Test
  public void testDoFilterAuthenticated() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -547,8 +560,8 @@ public class TestAuthenticationFilter extends TestCase {
          public Object answer(InvocationOnMock invocation) throws Throwable {
            Object[] args = invocation.getArguments();
            HttpServletRequest request = (HttpServletRequest) args[0];
-            assertEquals("u", request.getRemoteUser());
-            assertEquals("p", request.getUserPrincipal().getName());
+            Assert.assertEquals("u", request.getRemoteUser());
+            Assert.assertEquals("p", request.getUserPrincipal().getName());
            return null;
          }
        }
@@ -561,6 +574,7 @@ public class TestAuthenticationFilter extends TestCase {
    }
  }

+  @Test
  public void testDoFilterAuthenticatedExpired() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -594,7 +608,7 @@ public class TestAuthenticationFilter extends TestCase {
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
-            fail();
+            Assert.fail();
            return null;
          }
        }
@@ -616,15 +630,15 @@ public class TestAuthenticationFilter extends TestCase {
 
      Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
 
-      assertNotNull(setCookie[0]);
-      assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
-      assertEquals("", setCookie[0].getValue());
+      Assert.assertNotNull(setCookie[0]);
+      Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+      Assert.assertEquals("", setCookie[0].getValue());
    } finally {
      filter.destroy();
    }
  }

-
+  @Test
  public void testDoFilterAuthenticatedInvalidType() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {
@@ -658,7 +672,7 @@ public class TestAuthenticationFilter extends TestCase {
        new Answer<Object>() {
          @Override
          public Object answer(InvocationOnMock invocation) throws Throwable {
-            fail();
+            Assert.fail();
            return null;
          }
        }
@@ -680,14 +694,15 @@ public class TestAuthenticationFilter extends TestCase {
 
      Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
 
-      assertNotNull(setCookie[0]);
-      assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
-      assertEquals("", setCookie[0].getValue());
+      Assert.assertNotNull(setCookie[0]);
+      Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE, setCookie[0].getName());
+      Assert.assertEquals("", setCookie[0].getValue());
    } finally {
      filter.destroy();
    }
  }

+  @Test
  public void testManagementOperation() throws Exception {
    AuthenticationFilter filter = new AuthenticationFilter();
    try {

+ 39 - 33
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationToken.java

@@ -14,98 +14,104 @@
 package org.apache.hadoop.security.authentication.server;

 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;
 
 
-public class TestAuthenticationToken extends TestCase {
+public class TestAuthenticationToken {
 
 
+  @Test
   public void testAnonymous() {
-    assertNotNull(AuthenticationToken.ANONYMOUS);
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getUserName());
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getName());
-    assertEquals(null, AuthenticationToken.ANONYMOUS.getType());
-    assertEquals(-1, AuthenticationToken.ANONYMOUS.getExpires());
-    assertFalse(AuthenticationToken.ANONYMOUS.isExpired());
+    Assert.assertNotNull(AuthenticationToken.ANONYMOUS);
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getUserName());
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getName());
+    Assert.assertEquals(null, AuthenticationToken.ANONYMOUS.getType());
+    Assert.assertEquals(-1, AuthenticationToken.ANONYMOUS.getExpires());
+    Assert.assertFalse(AuthenticationToken.ANONYMOUS.isExpired());
   }
   }
 
 
+  @Test
   public void testConstructor() throws Exception {
   public void testConstructor() throws Exception {
     try {
     try {
       new AuthenticationToken(null, "p", "t");
       new AuthenticationToken(null, "p", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       new AuthenticationToken("", "p", "t");
       new AuthenticationToken("", "p", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       new AuthenticationToken("u", null, "t");
       new AuthenticationToken("u", null, "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       new AuthenticationToken("u", "", "t");
       new AuthenticationToken("u", "", "t");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       new AuthenticationToken("u", "p", null);
       new AuthenticationToken("u", "p", null);
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       new AuthenticationToken("u", "p", "");
       new AuthenticationToken("u", "p", "");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     new AuthenticationToken("u", "p", "t");
     new AuthenticationToken("u", "p", "t");
   }
   }
 
 
+  @Test
   public void testGetters() throws Exception {
   public void testGetters() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     token.setExpires(expires);
     token.setExpires(expires);
-    assertEquals("u", token.getUserName());
-    assertEquals("p", token.getName());
-    assertEquals("t", token.getType());
-    assertEquals(expires, token.getExpires());
-    assertFalse(token.isExpired());
+    Assert.assertEquals("u", token.getUserName());
+    Assert.assertEquals("p", token.getName());
+    Assert.assertEquals("t", token.getType());
+    Assert.assertEquals(expires, token.getExpires());
+    Assert.assertFalse(token.isExpired());
     Thread.sleep(70);               // +20 msec fuzz for timer granularity.
     Thread.sleep(70);               // +20 msec fuzz for timer granularity.
-    assertTrue(token.isExpired());
+    Assert.assertTrue(token.isExpired());
   }
   }
 
 
+  @Test
   public void testToStringAndParse() throws Exception {
   public void testToStringAndParse() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     token.setExpires(expires);
     token.setExpires(expires);
     String str = token.toString();
     String str = token.toString();
     token = AuthenticationToken.parse(str);
     token = AuthenticationToken.parse(str);
-    assertEquals("p", token.getName());
-    assertEquals("t", token.getType());
-    assertEquals(expires, token.getExpires());
-    assertFalse(token.isExpired());
+    Assert.assertEquals("p", token.getName());
+    Assert.assertEquals("t", token.getType());
+    Assert.assertEquals(expires, token.getExpires());
+    Assert.assertFalse(token.isExpired());
     Thread.sleep(70);               // +20 msec fuzz for timer granularity.
     Thread.sleep(70);               // +20 msec fuzz for timer granularity.
-    assertTrue(token.isExpired());
+    Assert.assertTrue(token.isExpired());
   }
   }
 
 
+  @Test
   public void testParseInvalid() throws Exception {
   public void testParseInvalid() throws Exception {
     long expires = System.currentTimeMillis() + 50;
     long expires = System.currentTimeMillis() + 50;
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
     AuthenticationToken token = new AuthenticationToken("u", "p", "t");
@@ -114,11 +120,11 @@ public class TestAuthenticationToken extends TestCase {
     str = str.substring(0, str.indexOf("e="));
     str = str.substring(0, str.indexOf("e="));
     try {
     try {
       AuthenticationToken.parse(str);
       AuthenticationToken.parse(str);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
     } catch (AuthenticationException ex) {
       // Expected
       // Expected
     } catch (Exception ex) {
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
 }
 }

+ 45 - 31
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java

@@ -13,25 +13,31 @@
  */
  */
 package org.apache.hadoop.security.authentication.server;
 package org.apache.hadoop.security.authentication.server;
 
 
+import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
-import junit.framework.TestCase;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
 import org.mockito.Mockito;
 import org.ietf.jgss.Oid;

 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import java.io.File;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
 
-public class TestKerberosAuthenticationHandler extends TestCase {
+public class TestKerberosAuthenticationHandler
+    extends KerberosSecurityTestcase {
 
 
   protected KerberosAuthenticationHandler handler;
   protected KerberosAuthenticationHandler handler;
 
 
@@ -54,9 +60,16 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     return props;
     return props;
   }
   }
 
 
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
+  @Before
+  public void setup() throws Exception {
+    // create keytab
+    File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+    String clientPrincipal = KerberosTestUtils.getClientPrincipal();
+    String serverPrincipal = KerberosTestUtils.getServerPrincipal();
+    clientPrincipal = clientPrincipal.substring(0, clientPrincipal.lastIndexOf("@"));
+    serverPrincipal = serverPrincipal.substring(0, serverPrincipal.lastIndexOf("@"));
+    getKdc().createPrincipal(keytabFile, clientPrincipal, serverPrincipal);
+    // handler
     handler = getNewAuthenticationHandler();
     handler = getNewAuthenticationHandler();
     Properties props = getDefaultProperties();
     Properties props = getDefaultProperties();
     try {
     try {
@@ -67,18 +80,10 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     }
     }
   }
   }
 
 
-  @Override
-  protected void tearDown() throws Exception {
-    if (handler != null) {
-      handler.destroy();
-      handler = null;
-    }
-    super.tearDown();
-  }
-
+  @Test(timeout=60000)
   public void testNameRules() throws Exception {
   public void testNameRules() throws Exception {
     KerberosName kn = new KerberosName(KerberosTestUtils.getServerPrincipal());
     KerberosName kn = new KerberosName(KerberosTestUtils.getServerPrincipal());
-    assertEquals(KerberosTestUtils.getRealm(), kn.getRealm());
+    Assert.assertEquals(KerberosTestUtils.getRealm(), kn.getRealm());
 
 
     //destroy handler created in setUp()
     //destroy handler created in setUp()
     handler.destroy();
     handler.destroy();
@@ -93,30 +98,32 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     } catch (Exception ex) {
     } catch (Exception ex) {
     }
     }
     kn = new KerberosName("bar@BAR");
     kn = new KerberosName("bar@BAR");
-    assertEquals("bar", kn.getShortName());
+    Assert.assertEquals("bar", kn.getShortName());
     kn = new KerberosName("bar@FOO");
     kn = new KerberosName("bar@FOO");
     try {
     try {
       kn.getShortName();
       kn.getShortName();
-      fail();
+      Assert.fail();
     }
     }
     catch (Exception ex) {      
     catch (Exception ex) {      
     }
     }
   }
   }
-  
+
+  @Test(timeout=60000)
   public void testInit() throws Exception {
   public void testInit() throws Exception {
-    assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
-    assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
+    Assert.assertEquals(KerberosTestUtils.getServerPrincipal(), handler.getPrincipal());
+    Assert.assertEquals(KerberosTestUtils.getKeytabFile(), handler.getKeytab());
   }
   }
 
 
+  @Test(timeout=60000)
   public void testType() throws Exception {
   public void testType() throws Exception {
-    assertEquals(getExpectedType(), handler.getType());
+    Assert.assertEquals(getExpectedType(), handler.getType());
   }
   }
 
 
   public void testRequestWithoutAuthorization() throws Exception {
   public void testRequestWithoutAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
 
-    assertNull(handler.authenticate(request, response));
+    Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
   }
   }
@@ -126,11 +133,12 @@ public class TestKerberosAuthenticationHandler extends TestCase {
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
 
     Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION)).thenReturn("invalid");
     Mockito.when(request.getHeader(KerberosAuthenticator.AUTHORIZATION)).thenReturn("invalid");
-    assertNull(handler.authenticate(request, response));
+    Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
   }
   }
 
 
+  @Test(timeout=60000)
   public void testRequestWithIncompleteAuthorization() throws Exception {
   public void testRequestWithIncompleteAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -139,15 +147,14 @@ public class TestKerberosAuthenticationHandler extends TestCase {
       .thenReturn(KerberosAuthenticator.NEGOTIATE);
       .thenReturn(KerberosAuthenticator.NEGOTIATE);
     try {
     try {
       handler.authenticate(request, response);
       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
     } catch (AuthenticationException ex) {
       // Expected
       // Expected
     } catch (Exception ex) {
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
 
 
-
   public void testRequestWithAuthorization() throws Exception {
   public void testRequestWithAuthorization() throws Exception {
     String token = KerberosTestUtils.doAsClient(new Callable<String>() {
     String token = KerberosTestUtils.doAsClient(new Callable<String>() {
       @Override
       @Override
@@ -191,9 +198,9 @@ public class TestKerberosAuthenticationHandler extends TestCase {
                                          Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
                                          Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
       Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
       Mockito.verify(response).setStatus(HttpServletResponse.SC_OK);
 
 
-      assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
-      assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
-      assertEquals(getExpectedType(), authToken.getType());
+      Assert.assertEquals(KerberosTestUtils.getClientPrincipal(), authToken.getName());
+      Assert.assertTrue(KerberosTestUtils.getClientPrincipal().startsWith(authToken.getUserName()));
+      Assert.assertEquals(getExpectedType(), authToken.getType());
     } else {
     } else {
       Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
       Mockito.verify(response).setHeader(Mockito.eq(KerberosAuthenticator.WWW_AUTHENTICATE),
                                          Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
                                          Mockito.matches(KerberosAuthenticator.NEGOTIATE + " .*"));
@@ -213,12 +220,19 @@ public class TestKerberosAuthenticationHandler extends TestCase {
 
 
     try {
     try {
       handler.authenticate(request, response);
       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
     } catch (AuthenticationException ex) {
       // Expected
       // Expected
     } catch (Exception ex) {
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
 
 
+  @After
+  public void tearDown() throws Exception {
+    if (handler != null) {
+      handler.destroy();
+      handler = null;
+    }
+  }
 }
 }

+ 18 - 11
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java

@@ -14,33 +14,37 @@
 package org.apache.hadoop.security.authentication.server;

 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import junit.framework.TestCase;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
+import org.junit.Assert;
+import org.junit.Test;
 import org.mockito.Mockito;

 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.util.Properties;
 
 
-public class TestPseudoAuthenticationHandler extends TestCase {
+public class TestPseudoAuthenticationHandler {
 
 
+  @Test
   public void testInit() throws Exception {
   public void testInit() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
     try {
       Properties props = new Properties();
       Properties props = new Properties();
       props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
       props.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
       handler.init(props);
       handler.init(props);
-      assertEquals(false, handler.getAcceptAnonymous());
+      Assert.assertEquals(false, handler.getAcceptAnonymous());
     } finally {
     } finally {
       handler.destroy();
       handler.destroy();
     }
     }
   }
   }
 
 
+  @Test
   public void testType() throws Exception {
   public void testType() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
-    assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
+    Assert.assertEquals(PseudoAuthenticationHandler.TYPE, handler.getType());
   }
   }
 
 
+  @Test
   public void testAnonymousOn() throws Exception {
   public void testAnonymousOn() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
     try {
@@ -53,12 +57,13 @@ public class TestPseudoAuthenticationHandler extends TestCase {
 
 
       AuthenticationToken token = handler.authenticate(request, response);
       AuthenticationToken token = handler.authenticate(request, response);
 
 
-      assertEquals(AuthenticationToken.ANONYMOUS, token);
+      Assert.assertEquals(AuthenticationToken.ANONYMOUS, token);
     } finally {
     } finally {
       handler.destroy();
       handler.destroy();
     }
     }
   }
   }
 
 
+  @Test
   public void testAnonymousOff() throws Exception {
   public void testAnonymousOff() throws Exception {
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     PseudoAuthenticationHandler handler = new PseudoAuthenticationHandler();
     try {
     try {
@@ -70,11 +75,11 @@ public class TestPseudoAuthenticationHandler extends TestCase {
       HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
       HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
 
       handler.authenticate(request, response);
       handler.authenticate(request, response);
-      fail();
+      Assert.fail();
     } catch (AuthenticationException ex) {
     } catch (AuthenticationException ex) {
       // Expected
       // Expected
     } catch (Exception ex) {
     } catch (Exception ex) {
-      fail();
+      Assert.fail();
     } finally {
     } finally {
       handler.destroy();
       handler.destroy();
     }
     }
@@ -93,19 +98,21 @@ public class TestPseudoAuthenticationHandler extends TestCase {
 
 
       AuthenticationToken token = handler.authenticate(request, response);
       AuthenticationToken token = handler.authenticate(request, response);
 
 
-      assertNotNull(token);
-      assertEquals("user", token.getUserName());
-      assertEquals("user", token.getName());
-      assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
+      Assert.assertNotNull(token);
+      Assert.assertEquals("user", token.getUserName());
+      Assert.assertEquals("user", token.getName());
+      Assert.assertEquals(PseudoAuthenticationHandler.TYPE, token.getType());
     } finally {
     } finally {
       handler.destroy();
       handler.destroy();
     }
     }
   }
   }
 
 
+  @Test
   public void testUserNameAnonymousOff() throws Exception {
   public void testUserNameAnonymousOff() throws Exception {
     _testUserName(false);
     _testUserName(false);
   }
   }
 
 
+  @Test
   public void testUserNameAnonymousOn() throws Exception {
   public void testUserNameAnonymousOn() throws Exception {
     _testUserName(true);
     _testUserName(true);
   }
   }

+ 15 - 4
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosName.java

@@ -21,14 +21,19 @@ package org.apache.hadoop.security.authentication.util;
 import java.io.IOException;
 import java.io.IOException;
 
 
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.Test;
-import static org.junit.Assert.*;
+
+import org.junit.Assert;
 
 
 public class TestKerberosName {
 public class TestKerberosName {
 
 
   @Before
   @Before
   public void setUp() throws Exception {
   public void setUp() throws Exception {
+    System.setProperty("java.security.krb5.realm", KerberosTestUtils.getRealm());
+    System.setProperty("java.security.krb5.kdc", "localhost:88");
+
     String rules =
     String rules =
       "RULE:[1:$1@$0](.*@YAHOO\\.COM)s/@.*//\n" +
       "RULE:[1:$1@$0](.*@YAHOO\\.COM)s/@.*//\n" +
       "RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
       "RULE:[2:$1](johndoe)s/^.*$/guest/\n" +
@@ -44,7 +49,7 @@ public class TestKerberosName {
     KerberosName nm = new KerberosName(from);
     KerberosName nm = new KerberosName(from);
     String simple = nm.getShortName();
     String simple = nm.getShortName();
     System.out.println("to " + simple);
     System.out.println("to " + simple);
-    assertEquals("short name incorrect", to, simple);
+    Assert.assertEquals("short name incorrect", to, simple);
   }
   }
 
 
   @Test
   @Test
@@ -61,7 +66,7 @@ public class TestKerberosName {
     System.out.println("Checking " + name + " to ensure it is bad.");
     System.out.println("Checking " + name + " to ensure it is bad.");
     try {
     try {
       new KerberosName(name);
       new KerberosName(name);
-      fail("didn't get exception for " + name);
+      Assert.fail("didn't get exception for " + name);
     } catch (IllegalArgumentException iae) {
     } catch (IllegalArgumentException iae) {
       // PASS
       // PASS
     }
     }
@@ -72,7 +77,7 @@ public class TestKerberosName {
     KerberosName nm = new KerberosName(from);
     KerberosName nm = new KerberosName(from);
     try {
     try {
       nm.getShortName();
       nm.getShortName();
-      fail("didn't get exception for " + from);
+      Assert.fail("didn't get exception for " + from);
     } catch (IOException ie) {
     } catch (IOException ie) {
       // PASS
       // PASS
     }
     }
@@ -85,4 +90,10 @@ public class TestKerberosName {
     checkBadTranslation("foo@ACME.COM");
     checkBadTranslation("foo@ACME.COM");
     checkBadTranslation("root/joe@FOO.COM");
     checkBadTranslation("root/joe@FOO.COM");
   }
   }
+
+  @After
+  public void clear() {
+    System.clearProperty("java.security.krb5.realm");
+    System.clearProperty("java.security.krb5.kdc");
+  }
 }
 }

+ 6 - 7
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java

@@ -16,11 +16,10 @@
  */
  */
 package org.apache.hadoop.security.authentication.util;
 package org.apache.hadoop.security.authentication.util;
 
 
-import static org.junit.Assert.*;
+import org.junit.Assert;
 
 
 import java.io.IOException;
 import java.io.IOException;
 
 
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.junit.Test;
 import org.junit.Test;
 
 
 public class TestKerberosUtil {
 public class TestKerberosUtil {
@@ -32,23 +31,23 @@ public class TestKerberosUtil {
     String testHost = "FooBar";
     String testHost = "FooBar";
 
 
     // send null hostname
     // send null hostname
-    assertEquals("When no hostname is sent",
+    Assert.assertEquals("When no hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, null));
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     // send empty hostname
-    assertEquals("When empty hostname is sent",
+    Assert.assertEquals("When empty hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, ""));
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     // send 0.0.0.0 hostname
-    assertEquals("When 0.0.0.0 hostname is sent",
+    Assert.assertEquals("When 0.0.0.0 hostname is sent",
         service + "/" + localHostname.toLowerCase(),
         service + "/" + localHostname.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     // send uppercase hostname
-    assertEquals("When uppercase hostname is sent",
+    Assert.assertEquals("When uppercase hostname is sent",
         service + "/" + testHost.toLowerCase(),
         service + "/" + testHost.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, testHost));
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     // send lowercase hostname
-    assertEquals("When lowercase hostname is sent",
+    Assert.assertEquals("When lowercase hostname is sent",
         service + "/" + testHost.toLowerCase(),
         service + "/" + testHost.toLowerCase(),
         KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
         KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
   }
   }

+ 21 - 15
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestSigner.java

@@ -13,68 +13,75 @@
  */
  */
 package org.apache.hadoop.security.authentication.util;
 
-import junit.framework.TestCase;
+import org.junit.Assert;
+import org.junit.Test;
 
 
-public class TestSigner extends TestCase {
+public class TestSigner {
 
 
+  @Test
   public void testNoSecret() throws Exception {
   public void testNoSecret() throws Exception {
     try {
     try {
       new Signer(null);
       new Signer(null);
-      fail();
+      Assert.fail();
     }
     }
     catch (IllegalArgumentException ex) {
     catch (IllegalArgumentException ex) {
     }
     }
   }
   }
 
 
+  @Test
   public void testNullAndEmptyString() throws Exception {
   public void testNullAndEmptyString() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     Signer signer = new Signer("secret".getBytes());
     try {
     try {
       signer.sign(null);
       signer.sign(null);
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
     try {
     try {
       signer.sign("");
       signer.sign("");
-      fail();
+      Assert.fail();
     } catch (IllegalArgumentException ex) {
     } catch (IllegalArgumentException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
 
 
+  @Test
   public void testSignature() throws Exception {
   public void testSignature() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     Signer signer = new Signer("secret".getBytes());
     String s1 = signer.sign("ok");
     String s1 = signer.sign("ok");
     String s2 = signer.sign("ok");
     String s2 = signer.sign("ok");
     String s3 = signer.sign("wrong");
     String s3 = signer.sign("wrong");
-    assertEquals(s1, s2);
-    assertNotSame(s1, s3);
+    Assert.assertEquals(s1, s2);
+    Assert.assertNotSame(s1, s3);
   }
   }
 
 
+  @Test
   public void testVerify() throws Exception {
   public void testVerify() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     Signer signer = new Signer("secret".getBytes());
     String t = "test";
     String t = "test";
     String s = signer.sign(t);
     String s = signer.sign(t);
     String e = signer.verifyAndExtract(s);
     String e = signer.verifyAndExtract(s);
-    assertEquals(t, e);
+    Assert.assertEquals(t, e);
   }
   }
 
 
+  @Test
   public void testInvalidSignedText() throws Exception {
   public void testInvalidSignedText() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     Signer signer = new Signer("secret".getBytes());
     try {
     try {
       signer.verifyAndExtract("test");
       signer.verifyAndExtract("test");
-      fail();
+      Assert.fail();
     } catch (SignerException ex) {
     } catch (SignerException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
 
 
+  @Test
   public void testTampering() throws Exception {
   public void testTampering() throws Exception {
     Signer signer = new Signer("secret".getBytes());
     Signer signer = new Signer("secret".getBytes());
     String t = "test";
     String t = "test";
@@ -82,12 +89,11 @@ public class TestSigner extends TestCase {
     s += "x";
     s += "x";
     try {
     try {
       signer.verifyAndExtract(s);
       signer.verifyAndExtract(s);
-      fail();
+      Assert.fail();
     } catch (SignerException ex) {
     } catch (SignerException ex) {
       // Expected
       // Expected
     } catch (Throwable ex) {
     } catch (Throwable ex) {
-      fail();
+      Assert.fail();
     }
     }
   }
   }
-
 }
 }

+ 0 - 28
hadoop-common-project/hadoop-auth/src/test/resources/krb5.conf

@@ -1,28 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# 
-[libdefaults]
-	default_realm = ${kerberos.realm}
-	udp_preference_limit = 1
-	extra_addresses = 127.0.0.1
-[realms]
-	${kerberos.realm} = {
-		admin_server = localhost:88
-		kdc = localhost:88
-	}
-[domain_realm]
-	localhost = ${kerberos.realm}

+ 69 - 1
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -291,6 +291,8 @@ Release 2.3.0 - UNRELEASED
 
 
   IMPROVEMENTS
 
+    HADOOP-9784. Add a builder for HttpServer. (Junping Du via llu)
+
    HADOOP 9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
    (Junping Du via llu)
 
 
@@ -316,6 +318,21 @@ Release 2.3.0 - UNRELEASED
    HADOOP-9848. Create a MiniKDC for use with security testing.
    (ywskycn via tucu)
 
 
+    HADOOP-9860. Remove class HackedKeytab and HackedKeytabEncoder from 
+    hadoop-minikdc once jira DIRSERVER-1882 solved. (ywskycn via tucu)
+
+    HADOOP-9866. convert hadoop-auth testcases requiring kerberos to 
+    use minikdc. (ywskycn via tucu)
+
+    HADOOP-9487 Deprecation warnings in Configuration should go to their
+    own log or otherwise be suppressible (Chu Tong via stevel)
+
+    HADOOP-9889. Refresh the Krb5 configuration when creating a new kdc in
+    Hadoop-MiniKDC (Wei Yan via Sandy Ryza)
+
+    HADOOP-9915.  o.a.h.fs.Stat support on Mac OS X  (Binglin Chang via Colin
+    Patrick McCabe)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -334,6 +351,17 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9652.  RawLocalFs#getFileLinkStatus does not fill in the link owner
     HADOOP-9652.  RawLocalFs#getFileLinkStatus does not fill in the link owner
     and mode.  (Andrew Wang via Colin Patrick McCabe)
     and mode.  (Andrew Wang via Colin Patrick McCabe)
 
 
+    HADOOP-9875.  TestDoAsEffectiveUser can fail on JDK 7.  (Aaron T. Myers via
+    Colin Patrick McCabe)
+
+    HADOOP-9865.  FileContext#globStatus has a regression with respect to
+    relative path.  (Chuan Lin via Colin Patrick McCabe)
+
+
+    HADOOP-9909. org.apache.hadoop.fs.Stat should permit other LANG.
+    (Shinichi Yamashita via Andrew Wang)
+
+    HADOOP-9908. Fix NPE when versioninfo properties file is missing (todd)
 
 
 Release 2.1.1-beta - UNRELEASED
 Release 2.1.1-beta - UNRELEASED
 
 
@@ -343,6 +371,9 @@ Release 2.1.1-beta - UNRELEASED
 
 
   IMPROVEMENTS
   IMPROVEMENTS
 
 
+    HADOOP-9910. proxy server start and stop documentation wrong
+    (Andre Kelpe via harsh)
+
     HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)
     HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)
  
  
     HADOOP-9787. ShutdownHelper util to shutdown threads and threadpools.
     HADOOP-9787. ShutdownHelper util to shutdown threads and threadpools.
@@ -363,10 +394,23 @@ Release 2.1.1-beta - UNRELEASED
 
 
     HADOOP-9802. Support Snappy codec on Windows. (cnauroth)
     HADOOP-9802. Support Snappy codec on Windows. (cnauroth)
 
 
+    HADOOP-9879. Move the version info of zookeeper dependencies to
+    hadoop-project/pom (Karthik Kambatla via Sandy Ryza)
+    
+    HADOOP-9886. Turn warning message in RetryInvocationHandler to debug (arpit)
+
+    HADOOP-9906. Move HAZKUtil to o.a.h.util.ZKUtil and make inner-classes
+    public (Karthik Kambatla via Sandy Ryza)
+
+    HADOOP-9918. Add addIfService to CompositeService (Karthik Kambatla via
+    Sandy Ryza)
+
   OPTIMIZATIONS
   OPTIMIZATIONS
 
 
   BUG FIXES
   BUG FIXES
 
 
+    HADOOP-9916. Fix race in ipc.Client retry. (Binglin Chang via llu)
+
     HADOOP-9768. chown and chgrp reject users and groups with spaces on platforms
     HADOOP-9768. chown and chgrp reject users and groups with spaces on platforms
     where spaces are otherwise acceptable. (cnauroth)
     where spaces are otherwise acceptable. (cnauroth)
 
 
@@ -391,7 +435,26 @@ Release 2.1.1-beta - UNRELEASED
 
 
     HADOOP-9381. Document dfs cp -f option. (Keegan Witt, suresh via suresh)
     HADOOP-9381. Document dfs cp -f option. (Keegan Witt, suresh via suresh)
 
 
-Release 2.1.0-beta - 2013-08-06
+    HADOOP-9868. Server must not advertise kerberos realm. (daryn via kihwal)
+
+    HADOOP-9880. SASL changes from HADOOP-9421 breaks Secure HA NN. (daryn via
+    jing9)
+
+    HADOOP-9887. globStatus does not correctly handle paths starting with a drive
+    spec on Windows. (Chuan Liu via cnauroth)
+
+    HADOOP-9894.  Race condition in Shell leads to logged error stream handling
+    exceptions (Arpit Agarwal)
+
+    HADOOP-9774. RawLocalFileSystem.listStatus() return absolute paths when
+    input path is relative on Windows. (Shanyu Zhao via ivanmi)
+
+    HADOOP-9924. FileUtil.createJarWithClassPath() does not generate relative
+    classpath correctly. (Shanyu Zhao via ivanmi)
+
+    HADOOP-9932. Improper synchronization in RetryCache. (kihwal)
+
+Release 2.1.0-beta - 2013-08-22
 
 
   INCOMPATIBLE CHANGES
   INCOMPATIBLE CHANGES
 
 
@@ -2049,6 +2112,11 @@ Release 0.23.10 - UNRELEASED
 
 
   IMPROVEMENTS
   IMPROVEMENTS
 
 
+    HADOOP-9686. Easy access to final parameters in Configuration (Jason Lowe
+    via jeagles)
+
+    HADOOP-8704. add request logging to jetty/httpserver (jeagles)
+
   OPTIMIZATIONS
   OPTIMIZATIONS
 
 
   BUG FIXES
   BUG FIXES

+ 1 - 5
hadoop-common-project/hadoop-common/pom.xml

@@ -217,7 +217,6 @@
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <exclusions>
         <exclusion>
           <groupId>jline</groupId>
@@ -245,7 +244,6 @@
     <dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
       <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <type>test-jar</type>
       <type>test-jar</type>
       <scope>test</scope>
       <scope>test</scope>
     </dependency>
     </dependency>
@@ -782,9 +780,7 @@
             <groupId>org.apache.maven.plugins</groupId>
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-surefire-plugin</artifactId>
             <artifactId>maven-surefire-plugin</artifactId>
             <configuration>
             <configuration>
-              <forkMode>perthread</forkMode>
-              <threadCount>${testsThreadCount}</threadCount>
-              <parallel>classes</parallel>
+              <forkCount>${testsThreadCount}</forkCount>
               <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError -DminiClusterDedicatedDirs=true</argLine>
               <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError -DminiClusterDedicatedDirs=true</argLine>
             </configuration>
             </configuration>
           </plugin>
           </plugin>

+ 34 - 1
hadoop-common-project/hadoop-common/src/main/conf/log4j.properties

@@ -130,6 +130,13 @@ log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
 log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
 
 
+#
+# hadoop configuration logging
+#
+
+# Uncomment the following line to turn off configuration deprecation warnings.
+# log4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN
+
 #
 #
 # hdfs audit logging
 # hdfs audit logging
 #
 #
@@ -231,4 +238,30 @@ log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 #log4j.appender.HSAUDIT.File=${hadoop.log.dir}/hs-audit.log
 #log4j.appender.HSAUDIT.File=${hadoop.log.dir}/hs-audit.log
 #log4j.appender.HSAUDIT.layout=org.apache.log4j.PatternLayout
 #log4j.appender.HSAUDIT.layout=org.apache.log4j.PatternLayout
 #log4j.appender.HSAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 #log4j.appender.HSAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
-#log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd
+#log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd
+
+# Http Server Request Logs
+#log4j.logger.http.requests.namenode=INFO,namenoderequestlog
+#log4j.appender.namenoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.namenoderequestlog.Filename=${hadoop.log.dir}/jetty-namenode-yyyy_mm_dd.log
+#log4j.appender.namenoderequestlog.RetainDays=3
+
+#log4j.logger.http.requests.datanode=INFO,datanoderequestlog
+#log4j.appender.datanoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.datanoderequestlog.Filename=${hadoop.log.dir}/jetty-datanode-yyyy_mm_dd.log
+#log4j.appender.datanoderequestlog.RetainDays=3
+
+#log4j.logger.http.requests.resourcemanager=INFO,resourcemanagerrequestlog
+#log4j.appender.resourcemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.resourcemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-resourcemanager-yyyy_mm_dd.log
+#log4j.appender.resourcemanagerrequestlog.RetainDays=3
+
+#log4j.logger.http.requests.jobhistory=INFO,jobhistoryrequestlog
+#log4j.appender.jobhistoryrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.jobhistoryrequestlog.Filename=${hadoop.log.dir}/jetty-jobhistory-yyyy_mm_dd.log
+#log4j.appender.jobhistoryrequestlog.RetainDays=3
+
+#log4j.logger.http.requests.nodemanager=INFO,nodemanagerrequestlog
+#log4j.appender.nodemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
+#log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
+#log4j.appender.nodemanagerrequestlog.RetainDays=3

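For context, the commented request-log template above pairs with the HttpRequestLogAppender and HttpRequestLog classes added later in this commit. A hedged sketch of the equivalent programmatic wiring through the log4j 1.x API (the logger/appender names follow the http.requests.<component> / <component>requestlog convention the lookup code expects; the file path is only illustrative):

import org.apache.hadoop.http.HttpRequestLogAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class RequestLogWiringSketch {
  public static void main(String[] args) {
    // Equivalent of the commented namenode lines in log4j.properties.
    HttpRequestLogAppender appender = new HttpRequestLogAppender();
    appender.setName("namenoderequestlog");                      // name HttpRequestLog looks up
    appender.setFilename("/tmp/jetty-namenode-yyyy_mm_dd.log");   // illustrative path
    appender.setRetainDays(3);

    Logger logger = Logger.getLogger("http.requests.namenode");
    logger.setLevel(Level.INFO);
    logger.addAppender(appender);
  }
}
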
+ 17 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -153,6 +153,10 @@ import com.google.common.base.Preconditions;
  * will be resolved to another property in this Configuration, while
  * <tt>${<i>user.name</i>}</tt> would then ordinarily be resolved to the value
  * of the System property with that name.
+ * By default, warnings will be given to any deprecated configuration 
+ * parameters and these are suppressible by configuring
+ * <tt>log4j.logger.org.apache.hadoop.conf.Configuration.deprecation</tt> in
+ * log4j.properties file.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -161,6 +165,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private static final Log LOG =
     LogFactory.getLog(Configuration.class);
 
+  private static final Log LOG_DEPRECATION =
+    LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation");
+
   private boolean quietmode = true;
   
   private static class Resource {
@@ -836,7 +843,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private void warnOnceIfDeprecated(String name) {
     DeprecatedKeyInfo keyInfo = deprecatedKeyMap.get(name);
     if (keyInfo != null && !keyInfo.accessed) {
-      LOG.warn(keyInfo.getWarningMessage(name));
+      LOG_DEPRECATION.info(keyInfo.getWarningMessage(name));
     }
   }
 
@@ -1911,6 +1918,15 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
   }
 
+  /**
+   * Get the set of parameters marked final.
+   *
+   * @return final parameter set.
+   */
+  public Set<String> getFinalParameters() {
+    return new HashSet<String>(finalParameters);
+  }
+
   protected synchronized Properties getProps() {
     if (properties == null) {
       properties = new Properties();

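A minimal, hypothetical sketch of how the two Configuration additions above surface to callers (fs.default.name is just one example of a deprecated key; turning the deprecation logger off is optional):

import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class ConfigurationAdditionsSketch {
  public static void main(String[] args) {
    // Deprecation messages now go to a dedicated logger, so they can be
    // silenced without touching Configuration's own log level.
    Logger.getLogger("org.apache.hadoop.conf.Configuration.deprecation")
        .setLevel(Level.OFF);

    Configuration conf = new Configuration();
    conf.set("fs.default.name", "hdfs://localhost:8020"); // deprecated key, example only

    // New accessor: the keys declared <final> in the loaded resources.
    Set<String> finals = conf.getFinalParameters();
    System.out.println("final parameters: " + finals);
  }
}
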
+ 11 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java

@@ -1239,6 +1239,9 @@ public class FileUtil {
     List<String> classPathEntryList = new ArrayList<String>(
       classPathEntries.length);
     for (String classPathEntry: classPathEntries) {
+      if (classPathEntry.length() == 0) {
+        continue;
+      }
       if (classPathEntry.endsWith("*")) {
         // Append all jars that match the wildcard
         Path globPath = new Path(classPathEntry).suffix("{.jar,.JAR}");
@@ -1252,7 +1255,14 @@
         }
       } else {
         // Append just this entry
-        String classPathEntryUrl = new File(classPathEntry).toURI().toURL()
+        File fileCpEntry = null;
+        if(!new Path(classPathEntry).isAbsolute()) {
+          fileCpEntry = new File(workingDir, classPathEntry);
+        }
+        else {
+          fileCpEntry = new File(classPathEntry);
+        }
+        String classPathEntryUrl = fileCpEntry.toURI().toURL()
           .toExternalForm();
 
         // File.toURI only appends trailing '/' if it can determine that it is a

+ 20 - 10
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java

@@ -85,7 +85,7 @@ class Globber {
   /**
    * Translate an absolute path into a list of path components.
    * We merge double slashes into a single slash here.
-   * The first path component (i.e. root) does not get an entry in the list.
+   * POSIX root path, i.e. '/', does not get an entry in the list.
    */
   private static List<String> getPathComponents(String path)
       throws IOException {
@@ -99,24 +99,24 @@
   }
 
   private String schemeFromPath(Path path) throws IOException {
-    String scheme = pathPattern.toUri().getScheme();
+    String scheme = path.toUri().getScheme();
     if (scheme == null) {
       if (fs != null) {
         scheme = fs.getUri().getScheme();
       } else {
-        scheme = fc.getFSofPath(path).getUri().getScheme();
+        scheme = fc.getDefaultFileSystem().getUri().getScheme();
       }
     }
     return scheme;
   }
 
   private String authorityFromPath(Path path) throws IOException {
-    String authority = pathPattern.toUri().getAuthority();
+    String authority = path.toUri().getAuthority();
     if (authority == null) {
       if (fs != null) {
         authority = fs.getUri().getAuthority();
       } else {
-        authority = fc.getFSofPath(path).getUri().getAuthority();
+        authority = fc.getDefaultFileSystem().getUri().getAuthority();
       }
     }
     return authority ;
@@ -143,8 +143,8 @@
       // Get the absolute path for this flattened pattern.  We couldn't do 
       // this prior to flattening because of patterns like {/,a}, where which
       // path you go down influences how the path must be made absolute.
-      Path absPattern =
-          fixRelativePart(new Path(flatPattern .isEmpty() ? "." : flatPattern ));
+      Path absPattern = fixRelativePart(new Path(
+          flatPattern.isEmpty() ? Path.CUR_DIR : flatPattern));
       // Now we break the flattened, absolute pattern into path components.
       // For example, /a/*/c would be broken into the list [a, *, c]
       List<String> components =
@@ -152,9 +152,19 @@
       // Starting out at the root of the filesystem, we try to match
       // filesystem entries against pattern components.
       ArrayList<FileStatus> candidates = new ArrayList<FileStatus>(1);
-      candidates.add(new FileStatus(0, true, 0, 0, 0,
-          new Path(scheme, authority, "/")));
-
+      if (Path.WINDOWS && !components.isEmpty()
+          && Path.isWindowsAbsolutePath(absPattern.toUri().getPath(), true)) {
+        // On Windows the path could begin with a drive letter, e.g. /E:/foo.
+        // We will skip matching the drive letter and start from listing the
+        // root of the filesystem on that drive.
+        String driveLetter = components.remove(0);
+        candidates.add(new FileStatus(0, true, 0, 0, 0, new Path(scheme,
+            authority, Path.SEPARATOR + driveLetter + Path.SEPARATOR)));
+      } else {
+        candidates.add(new FileStatus(0, true, 0, 0, 0,
+            new Path(scheme, authority, Path.SEPARATOR)));
+      }
+      
       for (String component : components) {
         ArrayList<FileStatus> newCandidates =
             new ArrayList<FileStatus>(candidates.size());

+ 12 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java

@@ -182,6 +182,18 @@ public class Path implements Comparable {
   /** Construct a Path from components. */
   public Path(String scheme, String authority, String path) {
     checkPathArg( path );
+
+    // add a slash in front of paths with Windows drive letters
+    if (hasWindowsDrive(path) && path.charAt(0) != '/') {
+      path = "/" + path;
+    }
+
+    // add "./" in front of Linux relative paths so that a path containing
+    // a colon e.q. "a:b" will not be interpreted as scheme "a".
+    if (!WINDOWS && path.charAt(0) != '/') {
+      path = "./" + path;
+    }
+
     initialize(scheme, authority, path, null);
   }
 

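A small, hypothetical check of the new constructor behaviour on a non-Windows platform (the name a:b is only an example of a relative path containing a colon):

import org.apache.hadoop.fs.Path;

public class PathColonSketch {
  public static void main(String[] args) {
    // Before this change "a:b" could be parsed as URI scheme "a";
    // the constructor now prefixes "./" so the colon stays in the path.
    Path p = new Path(null, null, "a:b");
    System.out.println("scheme = " + p.toUri().getScheme()); // expected: null
    System.out.println("path   = " + p.toUri().getPath());
  }
}
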
+ 4 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java

@@ -393,7 +393,7 @@ public class RawLocalFileSystem extends FileSystem {
         new DeprecatedRawLocalFileStatus(localf, getDefaultBlockSize(f), this)};
     }
 
-    File[] names = localf.listFiles();
+    String[] names = localf.list();
     if (names == null) {
       return null;
     }
@@ -401,7 +401,9 @@
     int j = 0;
     for (int i = 0; i < names.length; i++) {
       try {
-        results[j] = getFileStatus(new Path(names[i].getAbsolutePath()));
+        // Assemble the path using the Path 3 arg constructor to make sure
+        // paths with colon are properly resolved on Linux
+        results[j] = getFileStatus(new Path(f, new Path(null, null, names[i])));
         j++;
       } catch (FileNotFoundException e) {
         // ignore the files not found since the dir list may have have changed

+ 8 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.fs;
 import java.io.BufferedReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
 
@@ -62,6 +64,10 @@ public class Stat extends Shell {
     this.path = new Path(qualified.toUri().getPath());
     this.blockSize = blockSize;
     this.dereference = deref;
+    // LANG = C setting
+    Map<String, String> env = new HashMap<String, String>();
+    env.put("LANG", "C");
+    setEnvironment(env);
   }
 
   public FileStatus getFileStatus() throws IOException {
@@ -74,7 +80,7 @@
    * @return
    */
   public static boolean isAvailable() {
-    if (Shell.LINUX || Shell.FREEBSD) {
+    if (Shell.LINUX || Shell.FREEBSD || Shell.MAC) {
       return true;
     }
     return false;
@@ -94,7 +100,7 @@
     if (Shell.LINUX) {
       return new String[] {
           "stat", derefFlag + "c", "%s,%F,%Y,%X,%a,%U,%G,%N", path.toString() };
-    } else if (Shell.FREEBSD) {
+    } else if (Shell.FREEBSD || Shell.MAC) {
       return new String[] {
           "stat", derefFlag + "f", "%z,%HT,%m,%a,%Op,%Su,%Sg,`link' -> `%Y'",
           path.toString() };

+ 15 - 11
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java

@@ -106,10 +106,12 @@ public class PathData implements Comparable<PathData> {
 
   /**
    * Validates the given Windows path.
-   * Throws IOException on failure.
    * @param pathString a String of the path suppliued by the user.
+   * @return true if the URI scheme was not present in the pathString but
+   * inferred; false, otherwise.
+   * @throws IOException if anything goes wrong
    */
-  private void ValidateWindowsPath(String pathString)
+  private static boolean checkIfSchemeInferredFromPath(String pathString)
   throws IOException
   {
     if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
@@ -118,23 +120,21 @@ public class PathData implements Comparable<PathData> {
         throw new IOException("Invalid path string " + pathString);
       }
 
-      inferredSchemeFromPath = true;
-      return;
+      return true;
     }
 
     // Is it a forward slash-separated absolute path?
     if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
-      inferredSchemeFromPath = true;
-      return;
+      return true;
     }
 
     // Does it look like a URI? If so then just leave it alone.
     if (potentialUri.matcher(pathString).find()) {
-      return;
+      return false;
     }
 
     // Looks like a relative path on Windows.
-    return;
+    return false;
   }
 
   /**
@@ -153,7 +153,7 @@
     setStat(stat);
 
     if (Path.WINDOWS) {
-      ValidateWindowsPath(pathString);
+      inferredSchemeFromPath = checkIfSchemeInferredFromPath(pathString);
     }
   }
 
@@ -302,7 +302,7 @@
     // check getPath() so scheme slashes aren't considered part of the path
     String separator = uri.getPath().endsWith(Path.SEPARATOR)
         ? "" : Path.SEPARATOR;
-    return uri + separator + basename;
+    return uriToString(uri, inferredSchemeFromPath) + separator + basename;
   }
   
   protected enum PathType { HAS_SCHEME, SCHEMELESS_ABSOLUTE, RELATIVE };
@@ -356,7 +356,7 @@
             if (globUri.getAuthority() == null) {
               matchUri = removeAuthority(matchUri);
             }
-            globMatch = matchUri.toString();
+            globMatch = uriToString(matchUri, false);
             break;
           case SCHEMELESS_ABSOLUTE: // take just the uri's path
             globMatch = matchUri.getPath();
@@ -438,6 +438,10 @@
    */
   @Override
   public String toString() {
+    return uriToString(uri, inferredSchemeFromPath);
+  }
+ 
+  private static String uriToString(URI uri, boolean inferredSchemeFromPath) {
     String scheme = uri.getScheme();
     // No interpretation of symbols. Just decode % escaped chars.
     String decodedRemainder = uri.getSchemeSpecificPart();

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java

@@ -79,7 +79,7 @@ class SnapshotCommands extends FsCommand {
     protected void processArguments(LinkedList<PathData> items)
     throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       assert(items.size() == 1);
@@ -119,7 +119,7 @@
     protected void processArguments(LinkedList<PathData> items)
         throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       assert (items.size() == 1);
@@ -160,7 +160,7 @@
     protected void processArguments(LinkedList<PathData> items)
         throws IOException {
       super.processArguments(items);
-      if (exitCode != 0) { // check for error collecting paths
+      if (numErrors != 0) { // check for error collecting paths
         return;
       }
       Preconditions.checkArgument(items.size() == 1);

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java

@@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.ha.HAZKUtil.ZKAuthInfo;
+import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.KeeperException;
@@ -47,7 +47,6 @@ import org.apache.zookeeper.KeeperException.Code;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * 

+ 6 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java

@@ -36,7 +36,8 @@ import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
 import org.apache.hadoop.ha.HAServiceProtocol.RequestSource;
-import org.apache.hadoop.ha.HAZKUtil.ZKAuthInfo;
+import org.apache.hadoop.util.ZKUtil;
+import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.hadoop.ha.HealthMonitor.State;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.AccessControlException;
@@ -313,18 +314,18 @@ public abstract class ZKFailoverController {
         ZK_SESSION_TIMEOUT_DEFAULT);
     // Parse ACLs from configuration.
     String zkAclConf = conf.get(ZK_ACL_KEY, ZK_ACL_DEFAULT);
-    zkAclConf = HAZKUtil.resolveConfIndirection(zkAclConf);
-    List<ACL> zkAcls = HAZKUtil.parseACLs(zkAclConf);
+    zkAclConf = ZKUtil.resolveConfIndirection(zkAclConf);
+    List<ACL> zkAcls = ZKUtil.parseACLs(zkAclConf);
     if (zkAcls.isEmpty()) {
       zkAcls = Ids.CREATOR_ALL_ACL;
     }
     
     // Parse authentication from configuration.
     String zkAuthConf = conf.get(ZK_AUTH_KEY);
-    zkAuthConf = HAZKUtil.resolveConfIndirection(zkAuthConf);
+    zkAuthConf = ZKUtil.resolveConfIndirection(zkAuthConf);
     List<ZKAuthInfo> zkAuths;
     if (zkAuthConf != null) {
-      zkAuths = HAZKUtil.parseAuth(zkAuthConf);
+      zkAuths = ZKUtil.parseAuth(zkAuthConf);
     } else {
       zkAuths = Collections.emptyList();
     }

+ 94 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java

@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import java.util.HashMap;
+
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogConfigurationException;
+import org.apache.commons.logging.LogFactory;
+import org.apache.log4j.Appender;
+import org.apache.log4j.Logger;
+import org.mortbay.jetty.NCSARequestLog;
+import org.mortbay.jetty.RequestLog;
+
+/**
+ * RequestLog object for use with Http
+ */
+public class HttpRequestLog {
+
+  public static final Log LOG = LogFactory.getLog(HttpRequestLog.class);
+  private static final HashMap<String, String> serverToComponent;
+
+  static {
+    serverToComponent = new HashMap<String, String>();
+    serverToComponent.put("cluster", "resourcemanager");
+    serverToComponent.put("hdfs", "namenode");
+    serverToComponent.put("node", "nodemanager");
+  }
+
+  public static RequestLog getRequestLog(String name) {
+
+    String lookup = serverToComponent.get(name);
+    if (lookup != null) {
+      name = lookup;
+    }
+    String loggerName = "http.requests." + name;
+    String appenderName = name + "requestlog";
+    Log logger = LogFactory.getLog(loggerName);
+
+    if (logger instanceof Log4JLogger) {
+      Log4JLogger httpLog4JLog = (Log4JLogger)logger;
+      Logger httpLogger = httpLog4JLog.getLogger();
+      Appender appender = null;
+
+      try {
+        appender = httpLogger.getAppender(appenderName);
+      } catch (LogConfigurationException e) {
+        LOG.warn("Http request log for " + loggerName
+            + " could not be created");
+        throw e;
+      }
+
+      if (appender == null) {
+        LOG.info("Http request log for " + loggerName
+            + " is not defined");
+        return null;
+      }
+
+      if (appender instanceof HttpRequestLogAppender) {
+        HttpRequestLogAppender requestLogAppender
+          = (HttpRequestLogAppender)appender;
+        NCSARequestLog requestLog = new NCSARequestLog();
+        requestLog.setFilename(requestLogAppender.getFilename());
+        requestLog.setRetainDays(requestLogAppender.getRetainDays());
+        return requestLog;
+      }
+      else {
+        LOG.warn("Jetty request log for " + loggerName
+            + " was of the wrong class");
+        return null;
+      }
+    }
+    else {
+      LOG.warn("Jetty request log can only be enabled using Log4j");
+      return null;
+    }
+  }
+}

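A hedged usage sketch of the lookup above: getRequestLog returns a Jetty NCSARequestLog only when log4j defines an HttpRequestLogAppender named <component>requestlog on the http.requests.<component> logger (see the commented log4j.properties template earlier in this commit); otherwise it returns null and request logging stays off.

import org.apache.hadoop.http.HttpRequestLog;
import org.mortbay.jetty.RequestLog;

public class RequestLogLookupSketch {
  public static void main(String[] args) {
    // "hdfs" is mapped to the "namenode" component by the lookup table above.
    RequestLog log = HttpRequestLog.getRequestLog("hdfs");
    System.out.println(log == null
        ? "request logging not configured"
        : "request logging enabled via " + log.getClass().getName());
  }
}
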
+ 39 - 19
hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytab.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLogAppender.java

@@ -15,28 +15,48 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.directory.server.kerberos.shared.keytab;
+package org.apache.hadoop.http;
 
-import java.io.File;
-import java.io.IOException;
-import java.nio.ByteBuffer;
+import org.apache.log4j.spi.LoggingEvent;
+import org.apache.log4j.AppenderSkeleton;
 
-//This is a hack for ApacheDS 2.0.0-M14 to be able to create
-//keytab files with more than one principal.
-//It needs to be in this package because the KeytabEncoder class is package 
-// private.
-//This class can be removed once jira DIRSERVER-1882
-// (https://issues.apache.org/jira/browse/DIRSERVER-1882) solved
-public class HackedKeytab extends Keytab {
+/**
+ * Log4j Appender adapter for HttpRequestLog
+ */
+public class HttpRequestLogAppender extends AppenderSkeleton {
+
+  private String filename;
+  private int retainDays;
+
+  public HttpRequestLogAppender() {
+  }
+
+  public void setRetainDays(int retainDays) {
+    this.retainDays = retainDays;
+  }
 
-  private byte[] keytabVersion = VERSION_52;
+  public int getRetainDays() {
+    return retainDays;
+  }
 
-  public void write( File file, int principalCount ) throws IOException
-  {
-    HackedKeytabEncoder writer = new HackedKeytabEncoder();
-    ByteBuffer buffer = writer.write( keytabVersion, getEntries(),
-            principalCount );
-    writeFile( buffer, file );
+  public void setFilename(String filename) {
+    this.filename = filename;
   }
 
-}
+  public String getFilename() {
+    return filename;
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+  }
+
+  @Override
+  public void close() {
+  }
+
+  @Override
+  public boolean requiresLayout() {
+    return false;
+  }
+}

+ 126 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java

@@ -47,6 +47,7 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;
@@ -66,9 +67,12 @@ import org.mortbay.io.Buffer;
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Handler;
 import org.mortbay.jetty.MimeTypes;
+import org.mortbay.jetty.RequestLog;
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.handler.ContextHandler;
 import org.mortbay.jetty.handler.ContextHandlerCollection;
+import org.mortbay.jetty.handler.RequestLogHandler;
+import org.mortbay.jetty.handler.HandlerCollection;
 import org.mortbay.jetty.nio.SelectChannelConnector;
 import org.mortbay.jetty.security.SslSocketConnector;
 import org.mortbay.jetty.servlet.Context;
@@ -119,18 +123,117 @@ public class HttpServer implements FilterContainer {
   protected final Map<Context, Boolean> defaultContexts =
       new HashMap<Context, Boolean>();
   protected final List<String> filterNames = new ArrayList<String>();
-  private static final int MAX_RETRIES = 10;
   static final String STATE_DESCRIPTION_ALIVE = " - alive";
   static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
 
   private final boolean listenerStartedExternally;
   
+  /**
+   * Class to construct instances of HTTP server with specific options.
+   */
+  public static class Builder {
+    String name;
+    String bindAddress;
+    Integer port;
+    Boolean findPort;
+    Configuration conf;
+    Connector connector;
+    String[] pathSpecs;
+    AccessControlList adminsAcl;
+    boolean securityEnabled = false;
+    String usernameConfKey = null;
+    String keytabConfKey = null;
+    
+    public Builder setName(String name){
+      this.name = name;
+      return this;
+    }
+    
+    public Builder setBindAddress(String bindAddress){
+      this.bindAddress = bindAddress;
+      return this;
+    }
+    
+    public Builder setPort(int port) {
+      this.port = port;
+      return this;
+    }
+    
+    public Builder setFindPort(boolean findPort) {
+      this.findPort = findPort;
+      return this;
+    }
+    
+    public Builder setConf(Configuration conf) {
+      this.conf = conf;
+      return this;
+    }
+    
+    public Builder setConnector(Connector connector) {
+      this.connector = connector;
+      return this;
+    }
+    
+    public Builder setPathSpec(String[] pathSpec) {
+      this.pathSpecs = pathSpec;
+      return this;
+    }
+    
+    public Builder setACL(AccessControlList acl) {
+      this.adminsAcl = acl;
+      return this;
+    }
+    
+    public Builder setSecurityEnabled(boolean securityEnabled) {
+      this.securityEnabled = securityEnabled;
+      return this;
+    }
+    
+    public Builder setUsernameConfKey(String usernameConfKey) {
+      this.usernameConfKey = usernameConfKey;
+      return this;
+    }
+    
+    public Builder setKeytabConfKey(String keytabConfKey) {
+      this.keytabConfKey = keytabConfKey;
+      return this;
+    }
+    
+    public HttpServer build() throws IOException {
+      if (this.name == null) {
+        throw new HadoopIllegalArgumentException("name is not set");
+      }
+      if (this.bindAddress == null) {
+        throw new HadoopIllegalArgumentException("bindAddress is not set");
+      }
+      if (this.port == null) {
+        throw new HadoopIllegalArgumentException("port is not set");
+      }
+      if (this.findPort == null) {
+        throw new HadoopIllegalArgumentException("findPort is not set");
+      }
+      
+      if (this.conf == null) {
+        conf = new Configuration();
+      }
+      
+      HttpServer server = new HttpServer(this.name, this.bindAddress, this.port,
+      this.findPort, this.conf, this.adminsAcl, this.connector, this.pathSpecs);
+      if (this.securityEnabled) {
+        server.initSpnego(this.conf, this.usernameConfKey, this.keytabConfKey);
+      }
+      return server;
+    }
+  }
+  
   /** Same as this(name, bindAddress, port, findPort, null); */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port, boolean findPort
       ) throws IOException {
     this(name, bindAddress, port, findPort, new Configuration());
   }
-
+  
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, Connector connector) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, connector, null);
@@ -150,6 +253,7 @@ public class HttpServer implements FilterContainer {
    * @param pathSpecs Path specifications that this httpserver will be serving. 
    *        These will be added to any filters.
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, pathSpecs);
@@ -164,11 +268,13 @@
    *        increment by 1 until it finds a free port.
    * @param conf Configuration 
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, null);
   }
 
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, AccessControlList adminsAcl) 
       throws IOException {
@@ -186,6 +292,7 @@ public class HttpServer implements FilterContainer {
    * @param conf Configuration 
    * @param adminsAcl {@link AccessControlList} of the admins
    */
+  @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, AccessControlList adminsAcl, 
       Connector connector) throws IOException {
@@ -251,7 +358,18 @@
 
     final String appDir = getWebAppsPath(name);
     ContextHandlerCollection contexts = new ContextHandlerCollection();
-    webServer.setHandler(contexts);
+    RequestLog requestLog = HttpRequestLog.getRequestLog(name);
+
+    if (requestLog != null) {
+      RequestLogHandler requestLogHandler = new RequestLogHandler();
+      requestLogHandler.setRequestLog(requestLog);
+      HandlerCollection handlers = new HandlerCollection();
+      handlers.setHandlers(new Handler[] {requestLogHandler, contexts});
+      webServer.setHandler(handlers);
+    }
+    else {
+      webServer.setHandler(contexts);
+    }
 
     webAppContext = new WebAppContext();
     webAppContext.setDisplayName(name);
@@ -529,7 +647,7 @@ public class HttpServer implements FilterContainer {
   /**
    * Define a filter for a context and set up default url mappings.
    */
-  protected void defineFilter(Context ctx, String name,
+  public void defineFilter(Context ctx, String name,
       String classname, Map<String,String> parameters, String[] urls) {
 
     FilterHolder holder = new FilterHolder();
@@ -569,6 +687,10 @@
   public Object getAttribute(String name) {
     return webAppContext.getAttribute(name);
   }
+  
+  public WebAppContext getWebAppContext(){
+    return this.webAppContext;
+  }
 
   /**
    * Get the pathname to the webapps files.

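A minimal, hypothetical sketch of the new builder replacing the now-deprecated constructors (values are illustrative; like the old constructors, build() still expects a webapps/<name> resource to be available, as in Hadoop's own tests):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;

public class HttpServerBuilderSketch {
  public static void main(String[] args) throws Exception {
    HttpServer server = new HttpServer.Builder()
        .setName("test")               // required
        .setBindAddress("0.0.0.0")     // required
        .setPort(0)                    // required; 0 plus findPort picks a free port
        .setFindPort(true)             // required
        .setConf(new Configuration())  // optional; build() creates one if unset
        .build();
    server.start();
    System.out.println("listening on port " + server.getPort());
    server.stop();
  }
}
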
+ 62 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/LossyRetryInvocationHandler.java

@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.retry;
+
+import java.lang.reflect.Method;
+import java.net.UnknownHostException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * A dummy invocation handler extending RetryInvocationHandler. It drops the
+ * first N number of responses. This invocation handler is only used for testing.
+ */
+@InterfaceAudience.Private
+public class LossyRetryInvocationHandler<T> extends RetryInvocationHandler<T> {
+  private final int numToDrop;
+  private static final ThreadLocal<Integer> RetryCount = 
+      new ThreadLocal<Integer>();
+
+  public LossyRetryInvocationHandler(int numToDrop,
+      FailoverProxyProvider<T> proxyProvider, RetryPolicy retryPolicy) {
+    super(proxyProvider, retryPolicy);
+    this.numToDrop = numToDrop;
+  }
+
+  @Override
+  public Object invoke(Object proxy, Method method, Object[] args)
+      throws Throwable {
+    RetryCount.set(0);
+    return super.invoke(proxy, method, args);
+  }
+
+  @Override
+  protected Object invokeMethod(Method method, Object[] args) throws Throwable {
+    Object result = super.invokeMethod(method, args);
+    int retryCount = RetryCount.get();
+    if (retryCount < this.numToDrop) {
+      RetryCount.set(++retryCount);
+      LOG.info("Drop the response. Current retryCount == " + retryCount);
+      throw new UnknownHostException("Fake Exception");
+    } else {
+      LOG.info("retryCount == " + retryCount
+          + ". It's time to normally process the response");
+      return result;
+    }
+  }
+}

+ 1 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java

@@ -63,7 +63,7 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler {
     this(proxyProvider, retryPolicy, Collections.<String, RetryPolicy>emptyMap());
   }
 
-  RetryInvocationHandler(FailoverProxyProvider<T> proxyProvider,
+  protected RetryInvocationHandler(FailoverProxyProvider<T> proxyProvider,
       RetryPolicy defaultPolicy,
       Map<String, RetryPolicy> methodNameToPolicyMap) {
     this.proxyProvider = proxyProvider;
@@ -136,8 +136,6 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler {
             msg += ". Trying to fail over " + formatSleepMessage(action.delayMillis);
             if (LOG.isDebugEnabled()) {
               LOG.debug(msg, e);
-            } else {
-              LOG.warn(msg);
             }
           } else {
             if(LOG.isDebugEnabled()) {

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java

@@ -1063,8 +1063,8 @@ public class Client {
         if (status == RpcStatusProto.SUCCESS) {
           Writable value = ReflectionUtils.newInstance(valueClass, conf);
           value.readFields(in);                 // read value
-          call.setRpcResponse(value);
           calls.remove(callId);
+          call.setRpcResponse(value);
           
           // verify that length was correct
           // only for ProtobufEngine where len can be verified easily
@@ -1098,8 +1098,8 @@ public class Client {
                   new RemoteException(exceptionClassName, errorMsg) :
               new RemoteException(exceptionClassName, errorMsg, erCode));
           if (status == RpcStatusProto.ERROR) {
-            call.setException(re);
             calls.remove(callId);
+            call.setException(re);
           } else if (status == RpcStatusProto.FATAL) {
             // Close the connection
             markClosed(re);
@@ -1166,8 +1166,8 @@ public class Client {
       Iterator<Entry<Integer, Call>> itor = calls.entrySet().iterator() ;
       while (itor.hasNext()) {
         Call c = itor.next().getValue(); 
+        itor.remove();
         c.setException(closeException); // local exception
-        itor.remove();         
       }
     }
   }

+ 21 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java

@@ -76,6 +76,12 @@ public class RetryCache {
       this.expirationTime = expirationTime;
     }
 
+    CacheEntry(byte[] clientId, int callId, long expirationTime,
+        boolean success) {
+      this(clientId, callId, expirationTime);
+      this.state = success ? SUCCESS : FAILED;
+    }
+
     private static int hashCode(long value) {
       return (int)(value ^ (value >>> 32));
     }
@@ -147,6 +153,12 @@
       this.payload = payload;
     }
 
+    CacheEntryWithPayload(byte[] clientId, int callId, Object payload,
+        long expirationTime, boolean success) {
+     super(clientId, callId, expirationTime, success);
+     this.payload = payload;
+   }
+
     /** Override equals to avoid findbugs warnings */
     @Override
     public boolean equals(Object obj) {
@@ -253,18 +265,20 @@ public class RetryCache {
    */
   public void addCacheEntry(byte[] clientId, int callId) {
     CacheEntry newEntry = new CacheEntry(clientId, callId, System.nanoTime()
-        + expirationTime);
-    newEntry.completed(true);
-    set.put(newEntry);
+        + expirationTime, true);
+    synchronized(this) {
+      set.put(newEntry);
+    }
   }
   
   public void addCacheEntryWithPayload(byte[] clientId, int callId,
       Object payload) {
-    CacheEntry newEntry = new CacheEntryWithPayload(clientId, callId, payload,
-        System.nanoTime() + expirationTime);
     // since the entry is loaded from editlog, we can assume it succeeded.    
-    newEntry.completed(true);
-    set.put(newEntry);
+    CacheEntry newEntry = new CacheEntryWithPayload(clientId, callId, payload,
+        System.nanoTime() + expirationTime, true);
+    synchronized(this) {
+      set.put(newEntry);
+    }
   }
 
   private static CacheEntry newEntry(long expirationTime) {

+ 9 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -1311,7 +1311,15 @@ public abstract class Server {
           Throwable cause = e;
           Throwable cause = e;
           while (cause != null) {
             if (cause instanceof InvalidToken) {
+              // FIXME: hadoop method signatures are restricting the SASL
+              // callbacks to only returning InvalidToken, but some services
+              // need to throw other exceptions (ex. NN + StandyException),
+              // so for now we'll tunnel the real exceptions via an
+              // InvalidToken's cause which normally is not set 
+              if (cause.getCause() != null) {
+                cause = cause.getCause();
+              }
+              sendToClient = (IOException) cause;
               break;
             }
             cause = cause.getCause();

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java

@@ -104,7 +104,7 @@ public class SaslRpcServer {
         if (LOG.isDebugEnabled())
           LOG.debug("Kerberos principal name is " + fullName);
         // don't use KerberosName because we don't want auth_to_local
-        String[] parts = fullName.split("[/@]", 2);
+        String[] parts = fullName.split("[/@]", 3);
         protocol = parts[0];
         // should verify service host is present here rather than in create()
         // but lazy tests are using a UGI that isn't a SPN...
@@ -127,7 +127,6 @@
     final CallbackHandler callback;
     switch (authMethod) {
       case TOKEN: {
-        secretManager.checkAvailableForRead();
         callback = new SaslDigestCallbackHandler(secretManager, connection);
         break;
       }

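The limit change above matters for principals of the form service/host@REALM: with a limit of 2 the realm stayed glued to the host part, while a limit of 3 yields three elements. A small, self-contained illustration (the principal string is made up):

import java.util.Arrays;

public class PrincipalSplitSketch {
  public static void main(String[] args) {
    String fullName = "nn/host.example.com@EXAMPLE.COM";
    System.out.println(Arrays.toString(fullName.split("[/@]", 2)));
    // [nn, host.example.com@EXAMPLE.COM]
    System.out.println(Arrays.toString(fullName.split("[/@]", 3)));
    // [nn, host.example.com, EXAMPLE.COM]
  }
}
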
+ 19 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/TokenIdentifier.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.security.token;
 import java.io.IOException;
 import java.util.Arrays;
 
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.DataOutputBuffer;
@@ -35,6 +36,9 @@ import org.apache.hadoop.security.UserGroupInformation;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public abstract class TokenIdentifier implements Writable {
+
+  private String trackingId = null;
+
   /**
    * Get the token kind
    * @return the kind of the token
@@ -62,4 +66,19 @@
     }
     return Arrays.copyOf(buf.getData(), buf.getLength());
   }
+
+  /**
+   * Returns a tracking identifier that can be used to associate usages of a
+   * token across multiple client sessions.
+   *
+   * Currently, this function just returns an MD5 of {{@link #getBytes()}.
+   *
+   * @return tracking identifier
+   */
+  public String getTrackingId() {
+    if (trackingId == null) {
+      trackingId = DigestUtils.md5Hex(getBytes());
+    }
+    return trackingId;
+  }
 }

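A hedged sketch of the new accessor; it applies to any concrete TokenIdentifier subclass, and equal identifiers produce the same id because it is just a memoized MD5 hex digest of getBytes():

import org.apache.hadoop.security.token.TokenIdentifier;

public final class TrackingIdSketch {
  private TrackingIdSketch() {}

  // Usable wherever audit or log records for the same delegation token
  // need to be correlated without exposing the token bytes themselves.
  public static String trackingIdOf(TokenIdentifier ident) {
    return ident.getTrackingId();
  }
}
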
+ 34 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java

@@ -86,6 +86,11 @@ extends AbstractDelegationTokenIdentifier>
   private long tokenMaxLifetime;
   private long tokenRemoverScanInterval;
   private long tokenRenewInterval;
+  /**
+   * Whether to store a token's tracking ID in its TokenInformation.
+   * Can be overridden by a subclass.
+   */
+  protected boolean storeTokenTrackingId;
   private Thread tokenRemoverThread;
   protected volatile boolean running;
 
@@ -102,6 +107,7 @@ extends AbstractDelegationTokenIdentifier>
     this.tokenMaxLifetime = delegationTokenMaxLifetime;
     this.tokenRenewInterval = delegationTokenRenewInterval;
     this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
+    this.storeTokenTrackingId = false;
   }
 
   /** should be called before this object is used */
@@ -201,7 +207,7 @@ extends AbstractDelegationTokenIdentifier>
     }
     if (currentTokens.get(identifier) == null) {
       currentTokens.put(identifier, new DelegationTokenInformation(renewDate,
-          password));
+          password, getTrackingIdIfEnabled(identifier)));
     } else {
       throw new IOException(
           "Same delegation token being added twice.");
@@ -280,7 +286,7 @@ extends AbstractDelegationTokenIdentifier>
     byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
     storeNewToken(identifier, now + tokenRenewInterval);
     currentTokens.put(identifier, new DelegationTokenInformation(now
-        + tokenRenewInterval, password));
+        + tokenRenewInterval, password, getTrackingIdIfEnabled(identifier)));
     return password;
   }
 
@@ -299,6 +305,21 @@ extends AbstractDelegationTokenIdentifier>
     return info.getPassword();
   }
 
+  protected String getTrackingIdIfEnabled(TokenIdent ident) {
+    if (storeTokenTrackingId) {
+      return ident.getTrackingId();
+    }
+    return null;
+  }
+
+  public synchronized String getTokenTrackingId(TokenIdent identifier) {
+    DelegationTokenInformation info = currentTokens.get(identifier);
+    if (info == null) {
+      return null;
+    }
+    return info.getTrackingId();
+  }
+
   /**
   /**
    * Verifies that the given identifier and password are valid and match.
    * Verifies that the given identifier and password are valid and match.
    * @param identifier Token identifier.
    * @param identifier Token identifier.
@@ -359,8 +380,9 @@ extends AbstractDelegationTokenIdentifier>
           + " is trying to renew a token with " + "wrong password");
           + " is trying to renew a token with " + "wrong password");
     }
     }
     long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
     long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
+    String trackingId = getTrackingIdIfEnabled(id);
     DelegationTokenInformation info = new DelegationTokenInformation(renewTime,
-        password);
+        password, trackingId);
 
     if (currentTokens.get(id) == null) {
       throw new InvalidToken("Renewal request for unknown token");
@@ -420,9 +442,13 @@ extends AbstractDelegationTokenIdentifier>
   public static class DelegationTokenInformation {
     long renewDate;
     byte[] password;
-    public DelegationTokenInformation(long renewDate, byte[] password) {
+    String trackingId;
+
+    public DelegationTokenInformation(long renewDate, byte[] password,
+        String trackingId) {
       this.renewDate = renewDate;
       this.password = password;
+      this.trackingId = trackingId;
     }
     /** returns renew date */
     public long getRenewDate() {
@@ -432,6 +458,10 @@ extends AbstractDelegationTokenIdentifier>
     byte[] getPassword() {
       return password;
     }
+    /** returns tracking id */
+    public String getTrackingId() {
+      return trackingId;
+    }
   }
   
   /** Remove expired delegation tokens from cache */

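Illustrative sketch, not part of this patch: how a secret manager subclass might opt in to the new tracking-ID support. TrackingSecretManager and MyTokenIdentifier are made-up names; the protected flag, getTrackingIdIfEnabled(), and getTokenTrackingId() are the members added above.

  import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;

  public class TrackingSecretManager
      extends AbstractDelegationTokenSecretManager<MyTokenIdentifier> {

    public TrackingSecretManager(long keyUpdateInterval, long tokenMaxLifetime,
        long tokenRenewInterval, long removerScanInterval) {
      super(keyUpdateInterval, tokenMaxLifetime, tokenRenewInterval,
          removerScanInterval);
      // The new protected flag defaults to false; a subclass flips it on so
      // DelegationTokenInformation records identifier.getTrackingId().
      this.storeTokenTrackingId = true;
    }

    @Override
    public MyTokenIdentifier createIdentifier() {
      return new MyTokenIdentifier();  // hypothetical identifier type
    }
  }

With the flag enabled, getTokenTrackingId(identifier) returns the tracking ID captured when the token was stored or renewed, and null for tokens stored before the flag took effect.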
+ 20 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java

@@ -64,6 +64,11 @@ public class CompositeService extends AbstractService {
     }
   }
 
+  /**
+   * Add the passed {@link Service} to the list of services managed by this
+   * {@link CompositeService}
+   * @param service the {@link Service} to be added
+   */
   protected void addService(Service service) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Adding service " + service.getName());
@@ -73,6 +78,21 @@ public class CompositeService extends AbstractService {
     }
   }
 
+  /**
+   * If the passed object is an instance of {@link Service},
+   * add it to the list of services managed by this {@link CompositeService}
+   * @param object
+   * @return true if a service is added, false otherwise.
+   */
+  protected boolean addIfService(Object object) {
+    if (object instanceof Service) {
+      addService((Service) object);
+      return true;
+    } else {
+      return false;
+    }
+  }
+
   protected synchronized boolean removeService(Service service) {
     synchronized (serviceList) {
       return serviceList.add(service);

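Illustrative sketch, not from the patch: the new addIfService() lets a composite register a collaborator for lifecycle management only when it actually implements Service. The class and method names below are made up.

  import org.apache.hadoop.service.CompositeService;

  public class ExampleCompositeService extends CompositeService {

    public ExampleCompositeService() {
      super("ExampleCompositeService");
    }

    /** Register a collaborator that may or may not be a Service. */
    public void register(Object collaborator) {
      if (!addIfService(collaborator)) {
        // Not a Service: the caller keeps managing its lifecycle directly.
      }
    }
  }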
+ 14 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

@@ -515,8 +515,13 @@ abstract public class Shell {
       } catch (IOException ioe) {
         LOG.warn("Error while closing the input stream", ioe);
       }
-      if (!completed.get()) {
-        errThread.interrupt();
+      try {
+        if (!completed.get()) {
+          errThread.interrupt();
+          errThread.join();
+        }
+      } catch (InterruptedException ie) {
+        LOG.warn("Interrupted while joining errThread");
       }
       try {
         errReader.close();
@@ -535,6 +540,13 @@ abstract public class Shell {
   protected abstract void parseExecResult(BufferedReader lines)
   throws IOException;
 
+  /** 
+   * Get the environment variable
+   */
+  public String getEnvironment(String env) {
+    return environment.get(env);
+  }
+  
   /** get the current sub-process executing the given command 
    * @return process executing the command
    */

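A minimal sketch, not from the patch, of the new accessor, assuming a ShellCommandExecutor constructed with an explicit environment map; the command and variable are arbitrary.

  import java.util.HashMap;
  import java.util.Map;
  import org.apache.hadoop.util.Shell.ShellCommandExecutor;

  public class ShellEnvExample {
    public static void main(String[] args) {
      Map<String, String> env = new HashMap<String, String>();
      env.put("LANG", "C");
      ShellCommandExecutor exec =
          new ShellCommandExecutor(new String[] {"ls"}, null, env);
      // getEnvironment exposes what was configured on the executor,
      // even before the command runs.
      System.out.println(exec.getEnvironment("LANG"));  // prints C
    }
  }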
+ 3 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java

@@ -48,6 +48,9 @@ public class VersionInfo {
     try {
       InputStream is = Thread.currentThread().getContextClassLoader()
         .getResourceAsStream(versionInfoFile);
+      if (is == null) {
+        throw new IOException("Resource not found");
+      }
       info.load(is);
     } catch (IOException ex) {
       LogFactory.getLog(getClass()).warn("Could not read '" + 

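For context, an illustrative snippet (not from the patch) showing what the properties loaded here back; it simply prints the build information exposed by the public accessors.

  import org.apache.hadoop.util.VersionInfo;

  public class PrintVersion {
    public static void main(String[] args) {
      // These read the version-info properties resource loaded above.
      System.out.println(VersionInfo.getVersion() + " (" + VersionInfo.getBranch()
          + ", built " + VersionInfo.getDate() + ")");
    }
  }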
+ 19 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAZKUtil.java → hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * See the License for the specific language governing permissions and
  * limitations under the License.
  * limitations under the License.
  */
  */
-package org.apache.hadoop.ha;
+package org.apache.hadoop.util;
 
 
 import java.io.File;
 import java.io.File;
 import java.io.IOException;
 import java.io.IOException;
@@ -36,7 +36,7 @@ import com.google.common.io.Files;
  * Utilities for working with ZooKeeper.
  * Utilities for working with ZooKeeper.
  */
  */
 @InterfaceAudience.Private
 @InterfaceAudience.Private
-public class HAZKUtil {
+public class ZKUtil {
   
   
   /**
   /**
    * Parse ACL permission string, partially borrowed from
    * Parse ACL permission string, partially borrowed from
@@ -76,9 +76,10 @@ public class HAZKUtil {
    * <code>sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa</code>
    * <code>sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:cdrwa</code>
    *
    *
    * @return ACL list
    * @return ACL list
-   * @throws HadoopIllegalArgumentException if an ACL is invalid
+   * @throws {@link BadAclFormatException} if an ACL is invalid
    */
    */
-  public static List<ACL> parseACLs(String aclString) {
+  public static List<ACL> parseACLs(String aclString) throws
+      BadAclFormatException {
     List<ACL> acl = Lists.newArrayList();
     List<ACL> acl = Lists.newArrayList();
     if (aclString == null) {
     if (aclString == null) {
       return acl;
       return acl;
@@ -113,8 +114,10 @@ public class HAZKUtil {
    * 
    * 
    * @param authString the comma-separated auth mechanisms
    * @param authString the comma-separated auth mechanisms
    * @return a list of parsed authentications
    * @return a list of parsed authentications
+   * @throws {@link BadAuthFormatException} if the auth format is invalid
    */
    */
-  public static List<ZKAuthInfo> parseAuth(String authString) {
+  public static List<ZKAuthInfo> parseAuth(String authString) throws
+      BadAuthFormatException{
     List<ZKAuthInfo> ret = Lists.newArrayList();
     List<ZKAuthInfo> ret = Lists.newArrayList();
     if (authString == null) {
     if (authString == null) {
       return ret;
       return ret;
@@ -161,7 +164,8 @@ public class HAZKUtil {
   /**
   /**
    * An authentication token passed to ZooKeeper.addAuthInfo
    * An authentication token passed to ZooKeeper.addAuthInfo
    */
    */
-  static class ZKAuthInfo {
+  @InterfaceAudience.Private
+  public static class ZKAuthInfo {
     private final String scheme;
     private final String scheme;
     private final byte[] auth;
     private final byte[] auth;
     
     
@@ -171,29 +175,32 @@ public class HAZKUtil {
       this.auth = auth;
       this.auth = auth;
     }
     }
 
 
-    String getScheme() {
+    public String getScheme() {
       return scheme;
       return scheme;
     }
     }
 
 
-    byte[] getAuth() {
+    public byte[] getAuth() {
       return auth;
       return auth;
     }
     }
   }
   }
 
 
-  static class BadAclFormatException extends HadoopIllegalArgumentException {
+  @InterfaceAudience.Private
+  public static class BadAclFormatException extends
+      HadoopIllegalArgumentException {
     private static final long serialVersionUID = 1L;
     private static final long serialVersionUID = 1L;
 
 
     public BadAclFormatException(String message) {
     public BadAclFormatException(String message) {
       super(message);
       super(message);
     }
     }
   }
   }
-  
-  static class BadAuthFormatException extends HadoopIllegalArgumentException {
+
+  @InterfaceAudience.Private
+  public static class BadAuthFormatException extends
+      HadoopIllegalArgumentException {
     private static final long serialVersionUID = 1L;
     private static final long serialVersionUID = 1L;
 
 
     public BadAuthFormatException(String message) {
     public BadAuthFormatException(String message) {
       super(message);
       super(message);
     }
     }
   }
   }
-
 }
 }

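Illustrative sketch, not from the patch, of calling the renamed utility from non-HA code; the principals and auth string are placeholders.

  import java.util.List;
  import org.apache.hadoop.util.ZKUtil;
  import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
  import org.apache.zookeeper.data.ACL;

  public class ZKUtilExample {
    public static void main(String[] args) {
      List<ACL> acls = ZKUtil.parseACLs(
          "sasl:hdfs/host1@MY.DOMAIN:cdrwa,sasl:hdfs/host2@MY.DOMAIN:ca");
      List<ZKAuthInfo> auths = ZKUtil.parseAuth("digest:user:pass");
      System.out.println(acls.size() + " ACLs, " + auths.size() + " auth entries");
      // Malformed input now surfaces as the public, unchecked
      // ZKUtil.BadAclFormatException / ZKUtil.BadAuthFormatException.
    }
  }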
+ 2 - 2
hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm

@@ -518,7 +518,7 @@ $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start nodemana
   are used with load balancing it should be run on each of them:
   are used with load balancing it should be run on each of them:
 
 
 ----
 ----
-$ $HADOOP_YARN_HOME/bin/yarn start proxyserver --config $HADOOP_CONF_DIR
+$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh start proxyserver --config $HADOOP_CONF_DIR
 ----
 ----
 
 
   Start the MapReduce JobHistory Server with the following command, run on the
   Start the MapReduce JobHistory Server with the following command, run on the
@@ -560,7 +560,7 @@ $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop nodemanag
   balancing it should be run on each of them:
   balancing it should be run on each of them:
 
 
 ----
 ----
-$ $HADOOP_YARN_HOME/bin/yarn stop proxyserver --config $HADOOP_CONF_DIR
+$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh stop proxyserver --config $HADOOP_CONF_DIR
 ----
 ----
 
 
 
 

+ 26 - 15
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java

@@ -24,6 +24,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.File;
 import java.io.PrintStream;
 import java.io.PrintStream;
 import java.util.StringTokenizer;
 import java.util.StringTokenizer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.ArrayList;
 
 
 /**
 /**
  *
  *
@@ -32,23 +35,31 @@ import java.util.StringTokenizer;
 public abstract class CommandExecutor {  
 public abstract class CommandExecutor {  
   protected String[] getCommandAsArgs(final String cmd, final String masterKey,
   protected String[] getCommandAsArgs(final String cmd, final String masterKey,
 		                                       final String master) {
 		                                       final String master) {
-    StringTokenizer tokenizer = new StringTokenizer(cmd, " ");
-    String[] args = new String[tokenizer.countTokens()];
-    
-    int i = 0;
-    while (tokenizer.hasMoreTokens()) {
-      args[i] = tokenizer.nextToken();
+    String regex = "\'([^\']*)\'|\"([^\"]*)\"|(\\S+)";
+    Matcher matcher = Pattern.compile(regex).matcher(cmd);
 
 
-      args[i] = args[i].replaceAll(masterKey, master);
-      args[i] = args[i].replaceAll("CLITEST_DATA", 
-        new File(CLITestHelper.TEST_CACHE_DATA_DIR).
-        toURI().toString().replace(' ', '+'));
-      args[i] = args[i].replaceAll("USERNAME", System.getProperty("user.name"));
+    ArrayList<String> args = new ArrayList<String>();
+    String arg = null;
 
 
-      i++;
-    }
-    
-    return args;
+    while (matcher.find()) {
+      if (matcher.group(1) != null) {
+        arg = matcher.group(1);
+      } else if (matcher.group(2) != null) {
+        arg = matcher.group(2);
+      } else {
+        arg = matcher.group(3);
+      }
+
+      arg = arg.replaceAll(masterKey, master);
+      arg = arg.replaceAll("CLITEST_DATA",
+         new File(CLITestHelper.TEST_CACHE_DATA_DIR).
+         toURI().toString().replace(' ', '+'));
+      arg = arg.replaceAll("USERNAME", System.getProperty("user.name"));
+
+      args.add(arg);
+     }
+
+    return args.toArray(new String[0]);
   }
   }
   
   
   public Result executeCommand(final String cmd) throws Exception {
   public Result executeCommand(final String cmd) throws Exception {

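Standalone sketch, not from the patch, of the quote-aware tokenizing the new regex performs; the command string is made up.

  import java.util.ArrayList;
  import java.util.List;
  import java.util.regex.Matcher;
  import java.util.regex.Pattern;

  public class TokenizeExample {
    public static void main(String[] args) {
      // Same pattern as above: single-quoted, double-quoted, or bare tokens.
      Pattern p = Pattern.compile("\'([^\']*)\'|\"([^\"]*)\"|(\\S+)");
      Matcher m = p.matcher("-mkdir \"/dir with spaces\" -touchz '/a b/c'");
      List<String> tokens = new ArrayList<String>();
      while (m.find()) {
        tokens.add(m.group(1) != null ? m.group(1)
            : m.group(2) != null ? m.group(2) : m.group(3));
      }
      System.out.println(tokens);  // [-mkdir, /dir with spaces, -touchz, /a b/c]
    }
  }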
+ 17 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java

@@ -1272,7 +1272,23 @@ public class TestConfiguration extends TestCase {
    Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
    Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
    assertNotNull(clazz);
    assertNotNull(clazz);
   }
   }
-  
+
+  public void testGetFinalParameters() throws Exception {
+    out=new BufferedWriter(new FileWriter(CONFIG));
+    startConfig();
+    declareProperty("my.var", "x", "x", true);
+    endConfig();
+    Path fileResource = new Path(CONFIG);
+    Configuration conf = new Configuration();
+    Set<String> finalParameters = conf.getFinalParameters();
+    assertFalse("my.var already exists", finalParameters.contains("my.var"));
+    conf.addResource(fileResource);
+    assertEquals("my.var is undefined", "x", conf.get("my.var"));
+    assertFalse("finalparams not copied", finalParameters.contains("my.var"));
+    finalParameters = conf.getFinalParameters();
+    assertTrue("my.var is not final", finalParameters.contains("my.var"));
+  }
+
   public static void main(String[] argv) throws Exception {
   public static void main(String[] argv) throws Exception {
     junit.textui.TestRunner.main(new String[]{
     junit.textui.TestRunner.main(new String[]{
       TestConfiguration.class.getName()
       TestConfiguration.class.getName()

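Quick sketch, not from the patch, of the getFinalParameters() call the test exercises; the resource path is a placeholder.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;

  public class FinalParamsExample {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));  // placeholder path
      // Names of all properties declared with <final>true</final> in loaded resources.
      for (String name : conf.getFinalParameters()) {
        System.out.println(name + " is final");
      }
    }
  }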
+ 20 - 8
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java

@@ -757,7 +757,7 @@ public class TestFileUtil {
     String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
     String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
     String nonExistentSubdir = tmp.getCanonicalPath() + Path.SEPARATOR + "subdir"
     String nonExistentSubdir = tmp.getCanonicalPath() + Path.SEPARATOR + "subdir"
       + Path.SEPARATOR;
       + Path.SEPARATOR;
-    List<String> classPaths = Arrays.asList("cp1.jar", "cp2.jar", wildcardPath,
+    List<String> classPaths = Arrays.asList("", "cp1.jar", "cp2.jar", wildcardPath,
       "cp3.jar", nonExistentSubdir);
       "cp3.jar", nonExistentSubdir);
     String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
     String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
     String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
     String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
@@ -776,20 +776,32 @@ public class TestFileUtil {
       Assert.assertNotNull(classPathAttr);
       Assert.assertNotNull(classPathAttr);
       List<String> expectedClassPaths = new ArrayList<String>();
       List<String> expectedClassPaths = new ArrayList<String>();
       for (String classPath: classPaths) {
       for (String classPath: classPaths) {
+        if (classPath.length() == 0) {
+          continue;
+        }
         if (wildcardPath.equals(classPath)) {
         if (wildcardPath.equals(classPath)) {
           // add wildcard matches
           // add wildcard matches
           for (File wildcardMatch: wildcardMatches) {
           for (File wildcardMatch: wildcardMatches) {
             expectedClassPaths.add(wildcardMatch.toURI().toURL()
             expectedClassPaths.add(wildcardMatch.toURI().toURL()
               .toExternalForm());
               .toExternalForm());
           }
           }
-        } else if (nonExistentSubdir.equals(classPath)) {
-          // expect to maintain trailing path separator if present in input, even
-          // if directory doesn't exist yet
-          expectedClassPaths.add(new File(classPath).toURI().toURL()
-            .toExternalForm() + Path.SEPARATOR);
         } else {
         } else {
-          expectedClassPaths.add(new File(classPath).toURI().toURL()
-            .toExternalForm());
+          File fileCp = null;
+          if(!new Path(classPath).isAbsolute()) {
+            fileCp = new File(tmp, classPath);
+          }
+          else {
+            fileCp = new File(classPath);
+          }
+          if (nonExistentSubdir.equals(classPath)) {
+            // expect to maintain trailing path separator if present in input, even
+            // if directory doesn't exist yet
+            expectedClassPaths.add(fileCp.toURI().toURL()
+              .toExternalForm() + Path.SEPARATOR);
+          } else {
+            expectedClassPaths.add(fileCp.toURI().toURL()
+              .toExternalForm());
+          }
         }
         }
       }
       }
       List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
       List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));

+ 15 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java

@@ -280,6 +280,21 @@ public class TestLocalFileSystem {
         stats[0].getPath().toUri().getPath());
         stats[0].getPath().toUri().getPath());
   }
   }
   
   
+  @Test
+  public void testListStatusReturnConsistentPathOnWindows() throws IOException {
+    assumeTrue(Shell.WINDOWS);
+    String dirNoDriveSpec = TEST_ROOT_DIR;
+    if (dirNoDriveSpec.charAt(1) == ':')
+    	dirNoDriveSpec = dirNoDriveSpec.substring(2);
+    
+    File file = new File(dirNoDriveSpec, "foo");
+    file.mkdirs();
+    FileStatus[] stats = fileSys.listStatus(new Path(dirNoDriveSpec));
+    assertEquals("Unexpected number of stats", 1, stats.length);
+    assertEquals("Bad path from stat", new Path(file.getPath()).toUri().getPath(),
+        stats[0].getPath().toUri().getPath());
+  }
+  
   @Test(timeout = 10000)
   @Test(timeout = 10000)
   public void testReportChecksumFailure() throws IOException {
   public void testReportChecksumFailure() throws IOException {
     base.mkdirs();
     base.mkdirs();

+ 37 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java

@@ -158,7 +158,43 @@ public class TestPath extends TestCase {
       assertEquals(new Path("c:/foo"), new Path("d:/bar", "c:/foo"));
       assertEquals(new Path("c:/foo"), new Path("d:/bar", "c:/foo"));
     }
     }
   }
   }
-  
+
+  @Test (timeout = 30000)
+  public void testPathThreeArgContructor() {
+    assertEquals(new Path("foo"), new Path(null, null, "foo"));
+    assertEquals(new Path("scheme:///foo"), new Path("scheme", null, "/foo"));
+    assertEquals(
+        new Path("scheme://authority/foo"),
+        new Path("scheme", "authority", "/foo"));
+
+    if (Path.WINDOWS) {
+      assertEquals(new Path("c:/foo/bar"), new Path(null, null, "c:/foo/bar"));
+      assertEquals(new Path("c:/foo/bar"), new Path(null, null, "/c:/foo/bar"));
+    } else {
+      assertEquals(new Path("./a:b"), new Path(null, null, "a:b"));
+    }
+
+    // Resolution tests
+    if (Path.WINDOWS) {
+      assertEquals(
+          new Path("c:/foo/bar"),
+          new Path("/fou", new Path(null, null, "c:/foo/bar")));
+      assertEquals(
+          new Path("c:/foo/bar"),
+          new Path("/fou", new Path(null, null, "/c:/foo/bar")));
+      assertEquals(
+          new Path("/foo/bar"),
+          new Path("/foo", new Path(null, null, "bar")));
+    } else {
+      assertEquals(
+          new Path("/foo/bar/a:b"),
+          new Path("/foo/bar", new Path(null, null, "a:b")));
+      assertEquals(
+          new Path("/a:b"),
+          new Path("/foo/bar", new Path(null, null, "/a:b")));
+    }
+  }
+
   @Test (timeout = 30000)
   @Test (timeout = 30000)
   public void testEquals() {
   public void testEquals() {
     assertFalse(new Path("/").equals(new Path("/foo")));
     assertFalse(new Path("/").equals(new Path("/foo")));

+ 27 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java

@@ -17,7 +17,9 @@
  */
  */
 package org.apache.hadoop.fs;
 package org.apache.hadoop.fs;
 
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
 import static org.junit.Assert.fail;
 
 
 import java.io.BufferedReader;
 import java.io.BufferedReader;
@@ -25,10 +27,11 @@ import java.io.FileNotFoundException;
 import java.io.StringReader;
 import java.io.StringReader;
 
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration;
+import org.junit.Assume;
 import org.junit.BeforeClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Test;
 
 
-public class TestStat {
+public class TestStat extends FileSystemTestHelper {
 
 
   private static Stat stat;
   private static Stat stat;
 
 
@@ -112,6 +115,7 @@ public class TestStat {
 
 
   @Test(timeout=10000)
   @Test(timeout=10000)
   public void testStatFileNotFound() throws Exception {
   public void testStatFileNotFound() throws Exception {
+    Assume.assumeTrue(Stat.isAvailable());
     try {
     try {
       stat.getFileStatus();
       stat.getFileStatus();
       fail("Expected FileNotFoundException");
       fail("Expected FileNotFoundException");
@@ -119,4 +123,26 @@ public class TestStat {
       // expected
       // expected
     }
     }
   }
   }
+  
+  @Test(timeout=10000)
+  public void testStatEnvironment() throws Exception {
+    assertEquals(stat.getEnvironment("LANG"), "C");
+  }
+
+  @Test(timeout=10000)
+  public void testStat() throws Exception {
+    Assume.assumeTrue(Stat.isAvailable());
+    FileSystem fs = FileSystem.getLocal(new Configuration());
+    Path testDir = new Path(getTestRootPath(fs), "teststat");
+    fs.mkdirs(testDir);
+    Path sub1 = new Path(testDir, "sub1");
+    Path sub2 = new Path(testDir, "sub2");
+    fs.mkdirs(sub1);
+    fs.createSymlink(sub1, sub2, false);
+    FileStatus stat1 = new Stat(sub1, 4096l, false, fs).getFileStatus();
+    FileStatus stat2 = new Stat(sub2, 0, false, fs).getFileStatus();
+    assertTrue(stat1.isDirectory());
+    assertFalse(stat2.isDirectory());
+    fs.delete(testDir, true);
+  }
 }
 }

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElector.java

@@ -41,7 +41,7 @@ import org.mockito.Mockito;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException;
-import org.apache.hadoop.ha.HAZKUtil.ZKAuthInfo;
+import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 
 
 public class TestActiveStandbyElector {
 public class TestActiveStandbyElector {

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java

@@ -28,7 +28,7 @@ import java.util.UUID;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.State;
 import org.apache.hadoop.ha.ActiveStandbyElector.State;
-import org.apache.hadoop.ha.HAZKUtil.ZKAuthInfo;
+import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell;
 import org.apache.log4j.Level;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.ZooDefs.Ids;
 import org.apache.zookeeper.ZooDefs.Ids;

+ 16 - 6
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java

@@ -101,8 +101,12 @@ public class HttpServerFunctionalTest extends Assert {
     String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
     String webapps = System.getProperty(TEST_BUILD_WEBAPPS, BUILD_WEBAPPS_DIR);
     File testWebappDir = new File(webapps +
     File testWebappDir = new File(webapps +
         File.separatorChar + TEST);
         File.separatorChar + TEST);
+    try {
     if (!testWebappDir.exists()) {
     if (!testWebappDir.exists()) {
-      fail("Test webapp dir " + testWebappDir + " missing");
+      fail("Test webapp dir " + testWebappDir.getCanonicalPath() + " missing");
+    }
+    }
+    catch (IOException e) {
     }
     }
   }
   }
 
 
@@ -116,7 +120,8 @@ public class HttpServerFunctionalTest extends Assert {
   public static HttpServer createServer(String host, int port)
   public static HttpServer createServer(String host, int port)
       throws IOException {
       throws IOException {
     prepareTestWebapp();
     prepareTestWebapp();
-    return new HttpServer(TEST, host, port, true);
+    return new HttpServer.Builder().setName(TEST).setBindAddress(host)
+        .setPort(port).setFindPort(true).build();
   }
   }
 
 
   /**
   /**
@@ -126,7 +131,8 @@ public class HttpServerFunctionalTest extends Assert {
    * @throws IOException if it could not be created
    * @throws IOException if it could not be created
    */
    */
   public static HttpServer createServer(String webapp) throws IOException {
   public static HttpServer createServer(String webapp) throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).build();
   }
   }
   /**
   /**
    * Create an HttpServer instance for the given webapp
    * Create an HttpServer instance for the given webapp
@@ -137,13 +143,16 @@ public class HttpServerFunctionalTest extends Assert {
    */
    */
   public static HttpServer createServer(String webapp, Configuration conf)
   public static HttpServer createServer(String webapp, Configuration conf)
       throws IOException {
       throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).build();
   }
   }
 
 
   public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
   public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
       throws IOException {
       throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf, adminsAcl);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
   }
   }
+  
   /**
   /**
    * Create an HttpServer instance for the given webapp
    * Create an HttpServer instance for the given webapp
    * @param webapp the webapp to work with
    * @param webapp the webapp to work with
@@ -154,7 +163,8 @@ public class HttpServerFunctionalTest extends Assert {
    */
    */
   public static HttpServer createServer(String webapp, Configuration conf,
   public static HttpServer createServer(String webapp, Configuration conf,
       String[] pathSpecs) throws IOException {
       String[] pathSpecs) throws IOException {
-    return new HttpServer(webapp, "0.0.0.0", 0, true, conf, pathSpecs);
+    return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
+        .setPort(0).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
   }
   }
 
 
   /**
   /**

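Migration sketch, not from the patch: the multi-argument HttpServer constructors give way to the builder used throughout these tests. The name and bind address below are placeholders.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.http.HttpServer;

  public class HttpServerBuilderExample {
    public static void main(String[] args) throws Exception {
      HttpServer server = new HttpServer.Builder()
          .setName("example")          // webapp name, placeholder
          .setBindAddress("0.0.0.0")
          .setPort(0)                  // 0 = pick any free port
          .setFindPort(true)           // probe upward if the port is busy
          .setConf(new Configuration())
          .build();
      server.start();
      System.out.println("Listening on port " + server.getPort());
      server.stop();
    }
  }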
+ 47 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java

@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import org.apache.log4j.Logger;
+import org.junit.Test;
+import org.mortbay.jetty.NCSARequestLog;
+import org.mortbay.jetty.RequestLog;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+public class TestHttpRequestLog {
+
+  @Test
+  public void testAppenderUndefined() {
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    assertNull("RequestLog should be null", requestLog);
+  }
+
+  @Test
+  public void testAppenderDefined() {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("testrequestlog");
+    Logger.getLogger("http.requests.test").addAppender(requestLogAppender);
+    RequestLog requestLog = HttpRequestLog.getRequestLog("test");
+    Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
+    assertNotNull("RequestLog should not be null", requestLog);
+    assertEquals("Class mismatch", NCSARequestLog.class, requestLog.getClass());
+  }
+}

+ 37 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLogAppender.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestHttpRequestLogAppender {
+
+  @Test
+  public void testParameterPropagation() {
+
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setFilename("jetty-namenode-yyyy_mm_dd.log");
+    requestLogAppender.setRetainDays(17);
+    assertEquals("Filename mismatch", "jetty-namenode-yyyy_mm_dd.log",
+        requestLogAppender.getFilename());
+    assertEquals("Retain days mismatch", 17,
+        requestLogAppender.getRetainDays());
+  }
+}

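Putting the two tests together, an illustrative wiring, not from the patch: register an HttpRequestLogAppender on the "http.requests.<server-name>" logger and HttpRequestLog resolves it to a Jetty NCSARequestLog. The server name and file path are placeholders.

  import org.apache.hadoop.http.HttpRequestLog;
  import org.apache.hadoop.http.HttpRequestLogAppender;
  import org.apache.log4j.Logger;
  import org.mortbay.jetty.RequestLog;

  public class RequestLogWiring {
    public static void main(String[] args) {
      HttpRequestLogAppender appender = new HttpRequestLogAppender();
      appender.setName("namenoderequestlog");
      appender.setFilename("/tmp/jetty-namenode-yyyy_mm_dd.log");  // placeholder path
      appender.setRetainDays(17);
      Logger.getLogger("http.requests.namenode").addAppender(appender);

      RequestLog log = HttpRequestLog.getRequestLog("namenode");
      System.out.println(log == null ? "no request log" : log.getClass().getName());
    }
  }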
+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java

@@ -121,7 +121,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
 
 
   @SuppressWarnings("serial")
   @SuppressWarnings("serial")
   public static class LongHeaderServlet extends HttpServlet {
   public static class LongHeaderServlet extends HttpServlet {
-    @SuppressWarnings("unchecked")
     @Override
     @Override
     public void doGet(HttpServletRequest request,
     public void doGet(HttpServletRequest request,
                       HttpServletResponse response
                       HttpServletResponse response
@@ -362,7 +361,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
     MyGroupsProvider.mapping.put("userA", Arrays.asList("groupA"));
     MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
     MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
 
 
-    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf);
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).build();
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.start();
     myServer.start();
     int port = myServer.getPort();
     int port = myServer.getPort();
@@ -403,8 +403,9 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
     MyGroupsProvider.mapping.put("userD", Arrays.asList("groupD"));
     MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
     MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
 
 
-    HttpServer myServer = new HttpServer("test", "0.0.0.0", 0, true, conf,
-        new AccessControlList("userA,userB groupC,groupD"));
+    HttpServer myServer = new HttpServer.Builder().setName("test")
+        .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).setConf(conf)
+        .setACL(new AccessControlList("userA,userB groupC,groupD")).build();
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
     myServer.start();
     myServer.start();
     int port = myServer.getPort();
     int port = myServer.getPort();

+ 22 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java

@@ -17,6 +17,7 @@
  */
  */
 package org.apache.hadoop.http;
 package org.apache.hadoop.http;
 
 
+import org.apache.log4j.Logger;
 import org.junit.Test;
 import org.junit.Test;
 
 
 public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
 public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
@@ -66,6 +67,27 @@ public class TestHttpServerLifecycle extends HttpServerFunctionalTest {
     stop(server);
     stop(server);
   }
   }
 
 
+  /**
+   * Test that the server with request logging enabled
+   *
+   * @throws Throwable on failure
+   */
+  @Test
+  public void testStartedServerWithRequestLog() throws Throwable {
+    HttpRequestLogAppender requestLogAppender = new HttpRequestLogAppender();
+    requestLogAppender.setName("httprequestlog");
+    requestLogAppender.setFilename(System.getProperty("test.build.data", "/tmp/")
+        + "jetty-name-yyyy_mm_dd.log");
+    Logger.getLogger(HttpServer.class.getName() + ".test").addAppender(requestLogAppender);
+    HttpServer server = null;
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
+    Logger.getLogger(HttpServer.class.getName() + ".test").removeAppender(requestLogAppender);
+  }
+
   /**
   /**
    * Assert that the result of {@link HttpServer#toString()} contains the specific text
    * Assert that the result of {@link HttpServer#toString()} contains the specific text
    * @param server server to examine
    * @param server server to examine

+ 31 - 29
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java

@@ -216,13 +216,13 @@ public class TestIPC {
     }
     }
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testSerial() throws IOException, InterruptedException {
   public void testSerial() throws IOException, InterruptedException {
-    testSerial(3, false, 2, 5, 100);
-    testSerial(3, true, 2, 5, 10);
+    internalTestSerial(3, false, 2, 5, 100);
+    internalTestSerial(3, true, 2, 5, 10);
   }
   }
 
 
-  public void testSerial(int handlerCount, boolean handlerSleep, 
+  public void internalTestSerial(int handlerCount, boolean handlerSleep,
                          int clientCount, int callerCount, int callCount)
                          int clientCount, int callerCount, int callCount)
     throws IOException, InterruptedException {
     throws IOException, InterruptedException {
     Server server = new TestServer(handlerCount, handlerSleep);
     Server server = new TestServer(handlerCount, handlerSleep);
@@ -249,7 +249,7 @@ public class TestIPC {
     server.stop();
     server.stop();
   }
   }
 	
 	
-  @Test
+  @Test(timeout=60000)
   public void testStandAloneClient() throws IOException {
   public void testStandAloneClient() throws IOException {
     Client client = new Client(LongWritable.class, conf);
     Client client = new Client(LongWritable.class, conf);
     InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
     InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
@@ -383,7 +383,7 @@ public class TestIPC {
     }
     }
   }
   }
 
 
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnClientWriteParam() throws Exception {
   public void testIOEOnClientWriteParam() throws Exception {
     doErrorTest(IOEOnWriteWritable.class,
     doErrorTest(IOEOnWriteWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -391,7 +391,7 @@ public class TestIPC {
         LongWritable.class);
         LongWritable.class);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testRTEOnClientWriteParam() throws Exception {
   public void testRTEOnClientWriteParam() throws Exception {
     doErrorTest(RTEOnWriteWritable.class,
     doErrorTest(RTEOnWriteWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -399,7 +399,7 @@ public class TestIPC {
         LongWritable.class);
         LongWritable.class);
   }
   }
 
 
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnServerReadParam() throws Exception {
   public void testIOEOnServerReadParam() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         IOEOnReadWritable.class,
         IOEOnReadWritable.class,
@@ -407,7 +407,7 @@ public class TestIPC {
         LongWritable.class);
         LongWritable.class);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testRTEOnServerReadParam() throws Exception {
   public void testRTEOnServerReadParam() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         RTEOnReadWritable.class,
         RTEOnReadWritable.class,
@@ -416,7 +416,7 @@ public class TestIPC {
   }
   }
 
 
   
   
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnServerWriteResponse() throws Exception {
   public void testIOEOnServerWriteResponse() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -424,7 +424,7 @@ public class TestIPC {
         LongWritable.class);
         LongWritable.class);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testRTEOnServerWriteResponse() throws Exception {
   public void testRTEOnServerWriteResponse() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -432,7 +432,7 @@ public class TestIPC {
         LongWritable.class);
         LongWritable.class);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnClientReadResponse() throws Exception {
   public void testIOEOnClientReadResponse() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -440,7 +440,7 @@ public class TestIPC {
         IOEOnReadWritable.class);
         IOEOnReadWritable.class);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testRTEOnClientReadResponse() throws Exception {
   public void testRTEOnClientReadResponse() throws Exception {
     doErrorTest(LongWritable.class,
     doErrorTest(LongWritable.class,
         LongWritable.class,
         LongWritable.class,
@@ -453,7 +453,7 @@ public class TestIPC {
    * that a ping should have been sent. This is a reproducer for a
    * that a ping should have been sent. This is a reproducer for a
    * deadlock seen in one iteration of HADOOP-6762.
    * deadlock seen in one iteration of HADOOP-6762.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testIOEOnWriteAfterPingClient() throws Exception {
   public void testIOEOnWriteAfterPingClient() throws Exception {
     // start server
     // start server
     Client.setPingInterval(conf, 100);
     Client.setPingInterval(conf, 100);
@@ -481,7 +481,7 @@ public class TestIPC {
    * Test that, if the socket factory throws an IOE, it properly propagates
    * Test that, if the socket factory throws an IOE, it properly propagates
    * to the client.
    * to the client.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testSocketFactoryException() throws IOException {
   public void testSocketFactoryException() throws IOException {
     SocketFactory mockFactory = mock(SocketFactory.class);
     SocketFactory mockFactory = mock(SocketFactory.class);
     doThrow(new IOException("Injected fault")).when(mockFactory).createSocket();
     doThrow(new IOException("Injected fault")).when(mockFactory).createSocket();
@@ -503,7 +503,7 @@ public class TestIPC {
    * failure is handled properly. This is a regression test for
    * failure is handled properly. This is a regression test for
    * HADOOP-7428.
    * HADOOP-7428.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testRTEDuringConnectionSetup() throws IOException {
   public void testRTEDuringConnectionSetup() throws IOException {
     // Set up a socket factory which returns sockets which
     // Set up a socket factory which returns sockets which
     // throw an RTE when setSoTimeout is called.
     // throw an RTE when setSoTimeout is called.
@@ -544,7 +544,7 @@ public class TestIPC {
     }
     }
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testIpcTimeout() throws IOException {
   public void testIpcTimeout() throws IOException {
     // start server
     // start server
     Server server = new TestServer(1, true);
     Server server = new TestServer(1, true);
@@ -566,7 +566,7 @@ public class TestIPC {
         addr, null, null, 3*PING_INTERVAL+MIN_SLEEP_TIME, conf);
         addr, null, null, 3*PING_INTERVAL+MIN_SLEEP_TIME, conf);
   }
   }
 
 
-  @Test
+  @Test(timeout=60000)
   public void testIpcConnectTimeout() throws IOException {
   public void testIpcConnectTimeout() throws IOException {
     // start server
     // start server
     Server server = new TestServer(1, true);
     Server server = new TestServer(1, true);
@@ -670,31 +670,31 @@ public class TestIPC {
     return FD_DIR.list().length;
     return FD_DIR.list().length;
   }
   }
 
 
-  @Test
+  @Test(timeout=60000)
   public void testIpcFromHadoop_0_18_13() throws IOException {
   public void testIpcFromHadoop_0_18_13() throws IOException {
     doIpcVersionTest(NetworkTraces.HADOOP_0_18_3_RPC_DUMP,
     doIpcVersionTest(NetworkTraces.HADOOP_0_18_3_RPC_DUMP,
         NetworkTraces.RESPONSE_TO_HADOOP_0_18_3_RPC);
         NetworkTraces.RESPONSE_TO_HADOOP_0_18_3_RPC);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testIpcFromHadoop0_20_3() throws IOException {
   public void testIpcFromHadoop0_20_3() throws IOException {
     doIpcVersionTest(NetworkTraces.HADOOP_0_20_3_RPC_DUMP,
     doIpcVersionTest(NetworkTraces.HADOOP_0_20_3_RPC_DUMP,
         NetworkTraces.RESPONSE_TO_HADOOP_0_20_3_RPC);
         NetworkTraces.RESPONSE_TO_HADOOP_0_20_3_RPC);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testIpcFromHadoop0_21_0() throws IOException {
   public void testIpcFromHadoop0_21_0() throws IOException {
     doIpcVersionTest(NetworkTraces.HADOOP_0_21_0_RPC_DUMP,
     doIpcVersionTest(NetworkTraces.HADOOP_0_21_0_RPC_DUMP,
         NetworkTraces.RESPONSE_TO_HADOOP_0_21_0_RPC);
         NetworkTraces.RESPONSE_TO_HADOOP_0_21_0_RPC);
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testHttpGetResponse() throws IOException {
   public void testHttpGetResponse() throws IOException {
     doIpcVersionTest("GET / HTTP/1.0\r\n\r\n".getBytes(),
     doIpcVersionTest("GET / HTTP/1.0\r\n\r\n".getBytes(),
         Server.RECEIVED_HTTP_REQ_RESPONSE.getBytes());
         Server.RECEIVED_HTTP_REQ_RESPONSE.getBytes());
   }
   }
   
   
-  @Test
+  @Test(timeout=60000)
   public void testConnectionRetriesOnSocketTimeoutExceptions() throws IOException {
   public void testConnectionRetriesOnSocketTimeoutExceptions() throws IOException {
     Configuration conf = new Configuration();
     Configuration conf = new Configuration();
     // set max retries to 0
     // set max retries to 0
@@ -720,7 +720,7 @@ public class TestIPC {
    * (1) the rpc server uses the call id/retry provided by the rpc client, and
    * (1) the rpc server uses the call id/retry provided by the rpc client, and
    * (2) the rpc client receives the same call id/retry from the rpc server.
    * (2) the rpc client receives the same call id/retry from the rpc server.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testCallIdAndRetry() throws IOException {
   public void testCallIdAndRetry() throws IOException {
     final CallInfo info = new CallInfo();
     final CallInfo info = new CallInfo();
 
 
@@ -772,7 +772,7 @@ public class TestIPC {
   /**
   /**
    * Test the retry count while used in a retry proxy.
    * Test the retry count while used in a retry proxy.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testRetryProxy() throws IOException {
   public void testRetryProxy() throws IOException {
     final Client client = new Client(LongWritable.class, conf);
     final Client client = new Client(LongWritable.class, conf);
     
     
@@ -785,7 +785,9 @@ public class TestIPC {
       }
       }
     };
     };
 
 
-    final int totalRetry = 256;
+    // try more times, so it is easier to find race condition bug
+    // 10000 times runs about 6s on a core i7 machine
+    final int totalRetry = 10000;
     DummyProtocol proxy = (DummyProtocol) Proxy.newProxyInstance(
     DummyProtocol proxy = (DummyProtocol) Proxy.newProxyInstance(
         DummyProtocol.class.getClassLoader(),
         DummyProtocol.class.getClassLoader(),
         new Class[] { DummyProtocol.class }, new TestInvocationHandler(client,
         new Class[] { DummyProtocol.class }, new TestInvocationHandler(client,
@@ -807,7 +809,7 @@ public class TestIPC {
   /**
   /**
    * Test if the rpc server gets the default retry count (0) from client.
    * Test if the rpc server gets the default retry count (0) from client.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testInitialCallRetryCount() throws IOException {
   public void testInitialCallRetryCount() throws IOException {
     // Override client to store the call id
     // Override client to store the call id
     final Client client = new Client(LongWritable.class, conf);
     final Client client = new Client(LongWritable.class, conf);
@@ -838,7 +840,7 @@ public class TestIPC {
   /**
   /**
    * Test if the rpc server gets the retry count from client.
    * Test if the rpc server gets the retry count from client.
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testCallRetryCount() throws IOException {
   public void testCallRetryCount() throws IOException {
     final int retryCount = 255;
     final int retryCount = 255;
     // Override client to store the call id
     // Override client to store the call id
@@ -873,7 +875,7 @@ public class TestIPC {
    * even if multiple threads are using the same client.
    * even if multiple threads are using the same client.
  * @throws InterruptedException 
  * @throws InterruptedException 
    */
    */
-  @Test
+  @Test(timeout=60000)
   public void testUniqueSequentialCallIds() 
   public void testUniqueSequentialCallIds() 
       throws IOException, InterruptedException {
       throws IOException, InterruptedException {
     int serverThreads = 10, callerCount = 100, perCallerCallCount = 100;
     int serverThreads = 10, callerCount = 100, perCallerCallCount = 100;

+ 4 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java

@@ -42,7 +42,10 @@ public class TestLogLevel extends TestCase {
       log.error("log.error1");
       log.error("log.error1");
       assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
       assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
 
 
-      HttpServer server = new HttpServer("..", "localhost", 22222, true);
+      HttpServer server = new HttpServer.Builder().setName("..")
+          .setBindAddress("localhost").setPort(22222).setFindPort(true)
+          .build();
+      
       server.start();
       server.start();
       int port = server.getPort();
       int port = server.getPort();
 
 

+ 6 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java

@@ -38,6 +38,7 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.Test;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier;
 import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier;
@@ -58,7 +59,7 @@ public class TestDoAsEffectiveUser {
       GROUP2_NAME };
       GROUP2_NAME };
   private static final String ADDRESS = "0.0.0.0";
   private static final String ADDRESS = "0.0.0.0";
   private TestProtocol proxy;
   private TestProtocol proxy;
-  private static Configuration masterConf = new Configuration();
+  private static final Configuration masterConf = new Configuration();
   
   
   
   
   public static final Log LOG = LogFactory
   public static final Log LOG = LogFactory
@@ -70,6 +71,10 @@ public class TestDoAsEffectiveUser {
         "RULE:[2:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//" +
         "RULE:[2:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//" +
         "RULE:[1:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
         "RULE:[1:$1@$0](.*@HADOOP.APACHE.ORG)s/@.*//"
         + "DEFAULT");
         + "DEFAULT");
+  }
+
+  @Before
+  public void setMasterConf() {
     UserGroupInformation.setConfiguration(masterConf);
     UserGroupInformation.setConfiguration(masterConf);
   }
   }
 
 

+ 17 - 16
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAZKUtil.java → hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * See the License for the specific language governing permissions and
  * limitations under the License.
  * limitations under the License.
  */
  */
-package org.apache.hadoop.ha;
+package org.apache.hadoop.util;
 
 
 import static org.junit.Assert.*;
 import static org.junit.Assert.*;
 
 
@@ -24,8 +24,9 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.IOException;
 import java.util.List;
 import java.util.List;
 
 
-import org.apache.hadoop.ha.HAZKUtil.BadAclFormatException;
-import org.apache.hadoop.ha.HAZKUtil.ZKAuthInfo;
+import org.apache.hadoop.util.ZKUtil;
+import org.apache.hadoop.util.ZKUtil.BadAclFormatException;
+import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
 import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.ZooDefs.Perms;
 import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.ACL;
 import org.junit.Test;
 import org.junit.Test;
@@ -33,9 +34,9 @@ import org.junit.Test;
 import com.google.common.base.Charsets;
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 import com.google.common.io.Files;
 
 
-public class TestHAZKUtil {
+public class TestZKUtil {
   private static final String TEST_ROOT_DIR = System.getProperty(
-      "test.build.data", "/tmp") + "/TestHAZKUtil";
+      "test.build.data", "/tmp") + "/TestZKUtil";
   private static final File TEST_FILE = new File(TEST_ROOT_DIR,
       "test-file");
   
@@ -45,13 +46,13 @@ public class TestHAZKUtil {
 
   @Test
   public void testEmptyACL() {
-    List<ACL> result = HAZKUtil.parseACLs("");
+    List<ACL> result = ZKUtil.parseACLs("");
     assertTrue(result.isEmpty());
   }
   
   @Test
   public void testNullACL() {
-    List<ACL> result = HAZKUtil.parseACLs(null);
+    List<ACL> result = ZKUtil.parseACLs(null);
     assertTrue(result.isEmpty());
   }
   
@@ -67,7 +68,7 @@ public class TestHAZKUtil {
   
   private static void badAcl(String acls, String expectedErr) {
     try {
-      HAZKUtil.parseACLs(acls);
+      ZKUtil.parseACLs(acls);
       fail("Should have failed to parse '" + acls + "'");
     } catch (BadAclFormatException e) {
       assertEquals(expectedErr, e.getMessage());
@@ -76,7 +77,7 @@ public class TestHAZKUtil {
 
   @Test
   public void testGoodACLs() {
-    List<ACL> result = HAZKUtil.parseACLs(
+    List<ACL> result = ZKUtil.parseACLs(
        "sasl:hdfs/host1@MY.DOMAIN:cdrwa, sasl:hdfs/host2@MY.DOMAIN:ca");
     ACL acl0 = result.get(0);
     assertEquals(Perms.CREATE | Perms.DELETE | Perms.READ |
@@ -92,19 +93,19 @@ public class TestHAZKUtil {
   
   @Test
   public void testEmptyAuth() {
-    List<ZKAuthInfo> result = HAZKUtil.parseAuth("");
+    List<ZKAuthInfo> result = ZKUtil.parseAuth("");
     assertTrue(result.isEmpty());
   }
   
   @Test
   public void testNullAuth() {
-    List<ZKAuthInfo> result = HAZKUtil.parseAuth(null);
+    List<ZKAuthInfo> result = ZKUtil.parseAuth(null);
     assertTrue(result.isEmpty());
   }
   
   @Test
   public void testGoodAuths() {
-    List<ZKAuthInfo> result = HAZKUtil.parseAuth(
+    List<ZKAuthInfo> result = ZKUtil.parseAuth(
        "scheme:data,\n   scheme2:user:pass");
     assertEquals(2, result.size());
     ZKAuthInfo auth0 = result.get(0);
@@ -118,16 +119,16 @@ public class TestHAZKUtil {
   
   @Test
   public void testConfIndirection() throws IOException {
-    assertNull(HAZKUtil.resolveConfIndirection(null));
-    assertEquals("x", HAZKUtil.resolveConfIndirection("x"));
     
+    assertNull(ZKUtil.resolveConfIndirection(null));
+    assertEquals("x", ZKUtil.resolveConfIndirection("x"));
    
     TEST_FILE.getParentFile().mkdirs();
     Files.write("hello world", TEST_FILE, Charsets.UTF_8);
-    assertEquals("hello world", HAZKUtil.resolveConfIndirection(
+    assertEquals("hello world", ZKUtil.resolveConfIndirection(
        "@" + TEST_FILE.getAbsolutePath()));
    
     try {
-      HAZKUtil.resolveConfIndirection("@" + BOGUS_FILE);
+      ZKUtil.resolveConfIndirection("@" + BOGUS_FILE);
       fail("Did not throw for non-existent file reference");
     } catch (FileNotFoundException fnfe) {
       assertTrue(fnfe.getMessage().startsWith(BOGUS_FILE));
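A quick sketch of the renamed helpers as a caller would use them (illustrative only; the org.apache.hadoop.util.ZKUtil package location is an assumption, not stated in this diff):

    import java.util.List;
    import org.apache.hadoop.util.ZKUtil;
    import org.apache.zookeeper.data.ACL;

    public class ZKUtilSketch {
      public static void main(String[] args) throws Exception {
        // Comma-separated scheme:id:perms entries, as exercised in the test above.
        List<ACL> acls = ZKUtil.parseACLs("sasl:hdfs/host1@MY.DOMAIN:cdrwa");
        System.out.println(acls);
        // Values without a leading '@' are returned unchanged; "@/path" loads the file.
        System.out.println(ZKUtil.resolveConfIndirection("digest:user:pass"));
      }
    }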

+ 1 - 1
hadoop-common-project/hadoop-minikdc/pom.xml

@@ -38,7 +38,7 @@
     <dependency>
       <groupId>org.apache.directory.server</groupId>
       <artifactId>apacheds-all</artifactId>
-      <version>2.0.0-M14</version>
+      <version>2.0.0-M15</version>
       <scope>compile</scope>
     </dependency>
     <dependency>

+ 0 - 121
hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytabEncoder.java

@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.directory.server.kerberos.shared.keytab;
-
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
-
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.List;
-
-//This is a hack for ApacheDS 2.0.0-M14 to be able to create
-//keytab files with more than one principal.
-//It needs to be in this package because the KeytabEncoder class is package 
-// private.
-//This class can be removed once jira DIRSERVER-1882
-// (https://issues.apache.org/jira/browse/DIRSERVER-1882) solved
-class HackedKeytabEncoder extends KeytabEncoder {
-
-  ByteBuffer write( byte[] keytabVersion, List<KeytabEntry> entries,
-                    int principalCount )
-  {
-    ByteBuffer buffer = ByteBuffer.allocate( 512 * principalCount);
-    putKeytabVersion(buffer, keytabVersion);
-    putKeytabEntries( buffer, entries );
-    buffer.flip();
-    return buffer;
-  }
-
-  private void putKeytabVersion( ByteBuffer buffer, byte[] version )
-  {
-    buffer.put( version );
-  }
-
-  private void putKeytabEntries( ByteBuffer buffer, List<KeytabEntry> entries )
-  {
-    Iterator<KeytabEntry> iterator = entries.iterator();
-
-    while ( iterator.hasNext() )
-    {
-      ByteBuffer entryBuffer = putKeytabEntry( iterator.next() );
-      int size = entryBuffer.position();
-
-      entryBuffer.flip();
-
-      buffer.putInt( size );
-      buffer.put( entryBuffer );
-    }
-  }
-
-  private ByteBuffer putKeytabEntry( KeytabEntry entry )
-  {
-    ByteBuffer buffer = ByteBuffer.allocate( 100 );
-
-    putPrincipalName( buffer, entry.getPrincipalName() );
-
-    buffer.putInt( ( int ) entry.getPrincipalType() );
-
-    buffer.putInt( ( int ) ( entry.getTimeStamp().getTime() / 1000 ) );
-
-    buffer.put( entry.getKeyVersion() );
-
-    putKeyBlock( buffer, entry.getKey() );
-
-    return buffer;
-  }
-
-  private void putPrincipalName( ByteBuffer buffer, String principalName )
-  {
-    String[] split = principalName.split("@");
-    String nameComponent = split[0];
-    String realm = split[1];
-
-    String[] nameComponents = nameComponent.split( "/" );
-
-    // increment for v1
-    buffer.putShort( ( short ) nameComponents.length );
-
-    putCountedString( buffer, realm );
-    // write components
-
-    for ( int ii = 0; ii < nameComponents.length; ii++ )
-    {
-      putCountedString( buffer, nameComponents[ii] );
-    }
-  }
-
-  private void putKeyBlock( ByteBuffer buffer, EncryptionKey key )
-  {
-    buffer.putShort( ( short ) key.getKeyType().getValue() );
-    putCountedBytes( buffer, key.getKeyValue() );
-  }
-
-  private void putCountedString( ByteBuffer buffer, String string )
-  {
-    byte[] data = string.getBytes();
-    buffer.putShort( ( short ) data.length );
-    buffer.put( data );
-  }
-
-  private void putCountedBytes( ByteBuffer buffer, byte[] data )
-  {
-    buffer.putShort( ( short ) data.length );
-    buffer.put( data );
-  }
-
-}

+ 15 - 3
hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java

@@ -37,7 +37,7 @@ import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
 import org.apache.directory.server.core.partition.ldif.LdifPartition;
 import org.apache.directory.server.kerberos.kdc.KdcServer;
 import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
-import org.apache.directory.server.kerberos.shared.keytab.HackedKeytab;
+import org.apache.directory.server.kerberos.shared.keytab.Keytab;
 import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
 import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 import org.apache.directory.server.protocol.shared.transport.UdpTransport;
@@ -59,6 +59,7 @@ import java.io.FileReader;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.StringReader;
+import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.ServerSocket;
 import java.text.MessageFormat;
@@ -432,6 +433,17 @@ public class MiniKdc {
 
     System.setProperty("sun.security.krb5.debug", conf.getProperty(DEBUG,
             "false"));
+
+    // refresh the config
+    Class<?> classRef;
+    if (System.getProperty("java.vendor").contains("IBM")) {
+      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
+    } else {
+      classRef = Class.forName("sun.security.krb5.Config");
+    }
+    Method refreshMethod = classRef.getMethod("refresh", new Class[0]);
+    refreshMethod.invoke(classRef, new Object[0]);
+
     LOG.info("MiniKdc listening at port: {}", getPort());
     LOG.info("MiniKdc setting JVM krb5.conf to: {}",
             krb5conf.getAbsolutePath());
@@ -514,7 +526,7 @@ public class MiniKdc {
   public void createPrincipal(File keytabFile, String ... principals)
           throws Exception {
     String generatedPassword = UUID.randomUUID().toString();
-    HackedKeytab keytab = new HackedKeytab();
+    Keytab keytab = new Keytab();
     List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
     for (String principal : principals) {
       createPrincipal(principal, generatedPassword);
@@ -529,6 +541,6 @@ public class MiniKdc {
       }
     }
     keytab.setEntries(entries);
-    keytab.write(keytabFile, principals.length);
+    keytab.write(keytabFile);
   }
 }
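A minimal usage sketch of the simplified keytab path above (not part of the patch; it assumes the default properties from MiniKdc.createConf() and a scratch work directory):

    import java.io.File;
    import java.util.Properties;
    import org.apache.hadoop.minikdc.MiniKdc;

    public class MiniKdcKeytabSketch {
      public static void main(String[] args) throws Exception {
        Properties conf = MiniKdc.createConf();        // default KDC settings
        File workDir = new File("target", "minikdc-work");
        workDir.mkdirs();
        MiniKdc kdc = new MiniKdc(conf, workDir);
        kdc.start();                                   // start now also refreshes the JVM krb5 config via reflection
        try {
          File keytab = new File(workDir, "test.keytab");
          // With ApacheDS 2.0.0-M15 the stock Keytab class handles multiple
          // principals, so no principal count is passed to write().
          kdc.createPrincipal(keytab, "HTTP/localhost", "client");
        } finally {
          kdc.stop();
        }
      }
    }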

+ 7 - 3
hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java

@@ -30,7 +30,11 @@ import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import java.io.File;
 import java.security.Principal;
-import java.util.*;
+import java.util.Set;
+import java.util.Map;
+import java.util.HashSet;
+import java.util.HashMap;
+import java.util.Arrays;
 
 public class TestMiniKdc extends KerberosSecurityTestcase {
 
@@ -137,7 +141,7 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
               subject.getPrincipals().iterator().next().getClass());
       Assert.assertEquals(principal + "@" + kdc.getRealm(),
               subject.getPrincipals().iterator().next().getName());
-      loginContext.login();
+      loginContext.logout();
 
       //server login
       subject = new Subject(false, principals, new HashSet<Object>(),
@@ -151,7 +155,7 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
               subject.getPrincipals().iterator().next().getClass());
       Assert.assertEquals(principal + "@" + kdc.getRealm(),
               subject.getPrincipals().iterator().next().getName());
-      loginContext.login();
+      loginContext.logout();
 
     } finally {
       if (loginContext != null) {

+ 46 - 0
hadoop-common-project/hadoop-nfs/pom.xml

@@ -95,4 +95,50 @@
       <version>11.0.2</version>
     </dependency>
   </dependencies>
+
+
+  <profiles>
+    <profile>
+      <id>dist</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${project.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>dist</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <!--<descriptorRefs>
+                    <descriptorRef>hadoop-nfs-dist</descriptorRef>
+                  </descriptorRefs>-->
+                  <descriptors>
+                    <descriptor>../../hadoop-assemblies/src/main/resources/assemblies/hadoop-nfs-dist.xml</descriptor>
+                  </descriptors>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
 </project>

+ 20 - 6
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java

@@ -19,9 +19,10 @@ package org.apache.hadoop.mount;
 
 import java.util.List;
 
+import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /**
  * Helper class for sending MountResponse
@@ -59,15 +60,28 @@ public class MountResponse {
     xdr.writeBoolean(false); // Value follows no
     return xdr;
   }
-
+  
   /** Response for RPC call {@link MountInterface.MNTPROC#EXPORT} */
-  public static XDR writeExportList(XDR xdr, int xid, List<String> exports) {
+  public static XDR writeExportList(XDR xdr, int xid, List<String> exports,
+      List<NfsExports> hostMatcher) {
+    assert (exports.size() == hostMatcher.size());
+
     RpcAcceptedReply.voidReply(xdr, xid);
-    for (String export : exports) {
+    for (int i = 0; i < exports.size(); i++) {
       xdr.writeBoolean(true); // Value follows - yes
-      xdr.writeString(export);
-      xdr.writeInt(0);
+      xdr.writeString(exports.get(i));
+
+      // List host groups
+      String[] hostGroups = hostMatcher.get(i).getHostGroupList();
+      if (hostGroups.length > 0) {
+        for (int j = 0; j < hostGroups.length; j++) {
+          xdr.writeBoolean(true); // Value follows - yes
+          xdr.writeVariableOpaque(hostGroups[j].getBytes());
+        }
+      }
+      xdr.writeBoolean(false); // Value follows - no more group
     }
+    
     xdr.writeBoolean(false); // Value follows - no
     return xdr;
   }
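A hypothetical caller of the new writeExportList signature (a sketch only; the single "/" export and the default Configuration are assumptions, not part of this change):

    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mount.MountResponse;
    import org.apache.hadoop.nfs.NfsExports;
    import org.apache.hadoop.oncrpc.XDR;

    public class ExportReplySketch {
      static XDR buildExportReply(int xid) {
        List<String> exports = Collections.singletonList("/");
        // One NfsExports matcher per export path; the two lists must be the same size.
        List<NfsExports> matchers =
            Collections.singletonList(NfsExports.getInstance(new Configuration()));
        return MountResponse.writeExportList(new XDR(), xid, exports, matchers);
      }
    }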

+ 24 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/AccessPrivilege.java

@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs;
+
+public enum AccessPrivilege {
+  READ_ONLY,
+  READ_WRITE,
+  NONE;
+}

+ 388 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java

@@ -0,0 +1,388 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.net.util.SubnetUtils;
+import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
+import org.apache.hadoop.util.LightWeightCache;
+import org.apache.hadoop.util.LightWeightGSet;
+import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * This class provides functionality for loading and checking the mapping 
+ * between client hosts and their access privileges.
+ */
+public class NfsExports {
+  
+  private static NfsExports exports = null;
+  
+  public static synchronized NfsExports getInstance(Configuration conf) {
+    if (exports == null) {
+      String matchHosts = conf.get(Nfs3Constant.EXPORTS_ALLOWED_HOSTS_KEY,
+          Nfs3Constant.EXPORTS_ALLOWED_HOSTS_KEY_DEFAULT);
+      int cacheSize = conf.getInt(Nfs3Constant.EXPORTS_CACHE_SIZE_KEY,
+          Nfs3Constant.EXPORTS_CACHE_SIZE_DEFAULT);
+      long expirationPeriodNano = conf.getLong(
+          Nfs3Constant.EXPORTS_CACHE_EXPIRYTIME_MILLIS_KEY,
+          Nfs3Constant.EXPORTS_CACHE_EXPIRYTIME_MILLIS_DEFAULT) * 1000 * 1000;
+      exports = new NfsExports(cacheSize, expirationPeriodNano, matchHosts);
+    }
+    return exports;
+  }
+  
+  public static final Log LOG = LogFactory.getLog(NfsExports.class);
+  
+  // only support IPv4 now
+  private static final String IP_ADDRESS = 
+      "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})";
+  private static final String SLASH_FORMAT_SHORT = IP_ADDRESS + "/(\\d{1,3})";
+  private static final String SLASH_FORMAT_LONG = IP_ADDRESS + "/" + IP_ADDRESS;
+  
+  private static final Pattern CIDR_FORMAT_SHORT = 
+      Pattern.compile(SLASH_FORMAT_SHORT);
+  
+  private static final Pattern CIDR_FORMAT_LONG = 
+      Pattern.compile(SLASH_FORMAT_LONG);
+  
+  static class AccessCacheEntry implements LightWeightCache.Entry{
+    private final String hostAddr;
+    private AccessPrivilege access;
+    private final long expirationTime; 
+    
+    private LightWeightGSet.LinkedElement next;
+    
+    AccessCacheEntry(String hostAddr, AccessPrivilege access,
+        long expirationTime) {
+      Preconditions.checkArgument(hostAddr != null);
+      this.hostAddr = hostAddr;
+      this.access = access;
+      this.expirationTime = expirationTime;
+    }
+    
+    @Override
+    public int hashCode() {
+      return hostAddr.hashCode();
+    }
+    
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj instanceof AccessCacheEntry) {
+        AccessCacheEntry entry = (AccessCacheEntry) obj;
+        return this.hostAddr.equals(entry.hostAddr);
+      }
+      return false;
+    }
+    
+    @Override
+    public void setNext(LinkedElement next) {
+      this.next = next;
+    }
+
+    @Override
+    public LinkedElement getNext() {
+      return this.next;
+    }
+
+    @Override
+    public void setExpirationTime(long timeNano) {
+      // we set expiration time in the constructor, and the expiration time 
+      // does not change
+    }
+
+    @Override
+    public long getExpirationTime() {
+      return this.expirationTime;
+    }
+  }
+
+  private final List<Match> mMatches;
+  
+  private final LightWeightCache<AccessCacheEntry, AccessCacheEntry> accessCache;
+  private final long cacheExpirationPeriod;
+
+  /**
+   * Constructor.
+   * @param cacheSize The size of the access privilege cache.
+   * @param expirationPeriodNano The period, in nanoseconds, after which a cached entry expires.
+   * @param matchHosts A string specifying one or multiple matchers. 
+   */
+  NfsExports(int cacheSize, long expirationPeriodNano, String matchHosts) {
+    this.cacheExpirationPeriod = expirationPeriodNano;
+    accessCache = new LightWeightCache<AccessCacheEntry, AccessCacheEntry>(
+        cacheSize, cacheSize, expirationPeriodNano, 0);        
+    String[] matchStrings = matchHosts.split(
+        Nfs3Constant.EXPORTS_ALLOWED_HOSTS_SEPARATOR);
+    mMatches = new ArrayList<Match>(matchStrings.length);
+    for(String mStr : matchStrings) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Processing match string '" + mStr + "'");
+      }
+      mStr = mStr.trim();
+      if(!mStr.isEmpty()) {
+        mMatches.add(getMatch(mStr));
+      }
+    }
+  }
+  
+  /**
+   * Return the configured group list
+   */
+  public String[] getHostGroupList() {
+    int listSize = mMatches.size();
+    String[] hostGroups = new String[listSize];
+
+    for (int i = 0; i < mMatches.size(); i++) {
+      hostGroups[i] = mMatches.get(i).getHostGroup();
+    }
+    return hostGroups;
+  }
+  
+  public AccessPrivilege getAccessPrivilege(InetAddress addr) {
+    return getAccessPrivilege(addr.getHostAddress(),
+        addr.getCanonicalHostName());
+  }
+  
+  AccessPrivilege getAccessPrivilege(String address, String hostname) {
+    long now = System.nanoTime();
+    AccessCacheEntry newEntry = new AccessCacheEntry(address,
+        AccessPrivilege.NONE, now + this.cacheExpirationPeriod);
+    // check if there is a cache entry for the given address
+    AccessCacheEntry cachedEntry = accessCache.get(newEntry);
+    if (cachedEntry != null && now < cachedEntry.expirationTime) {
+      // get a non-expired cache entry, use it
+      return cachedEntry.access;
+    } else {
+      for(Match match : mMatches) {
+        if(match.isIncluded(address, hostname)) {
+          if (match.accessPrivilege == AccessPrivilege.READ_ONLY) {
+            newEntry.access = AccessPrivilege.READ_ONLY;
+            break;
+          } else if (match.accessPrivilege == AccessPrivilege.READ_WRITE) {
+            newEntry.access = AccessPrivilege.READ_WRITE;
+          }
+        }
+      }
+      accessCache.put(newEntry);
+      return newEntry.access;
+    }
+  }
+
+  private static abstract class Match {
+    private final AccessPrivilege accessPrivilege;
+
+    private Match(AccessPrivilege accessPrivilege) {
+      this.accessPrivilege = accessPrivilege;
+    }
+
+    public abstract boolean isIncluded(String address, String hostname);
+    public abstract String getHostGroup();
+  }
+  
+  /**
+   * Matcher covering all client hosts (specified by "*")
+   */
+  private static class AnonymousMatch extends Match {
+    private AnonymousMatch(AccessPrivilege accessPrivilege) {
+      super(accessPrivilege);
+    }
+  
+    @Override
+    public boolean isIncluded(String address, String hostname) {
+      return true;
+    }
+
+    @Override
+    public String getHostGroup() {
+      return "*";
+    }
+  }
+  
+  /**
+   * Matcher using CIDR for client host matching
+   */
+  private static class CIDRMatch extends Match {
+    private final SubnetInfo subnetInfo;
+    
+    private CIDRMatch(AccessPrivilege accessPrivilege, SubnetInfo subnetInfo) {
+      super(accessPrivilege);
+      this.subnetInfo = subnetInfo;
+    }
+    
+    @Override
+    public boolean isIncluded(String address, String hostname) {
+      if(subnetInfo.isInRange(address)) {
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("CIDRNMatcher low = " + subnetInfo.getLowAddress() +
+              ", high = " + subnetInfo.getHighAddress() +
+              ", allowing client '" + address + "', '" + hostname + "'");
+        }
+        return true;
+      }
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("CIDRNMatcher low = " + subnetInfo.getLowAddress() +
+            ", high = " + subnetInfo.getHighAddress() +
+            ", denying client '" + address + "', '" + hostname + "'");
+      }
+      return false;
+    }
+
+    @Override
+    public String getHostGroup() {
+      return subnetInfo.getAddress() + "/" + subnetInfo.getNetmask();
+    }
+  }
+  
+  /**
+   * Matcher requiring exact string match for client host
+   */
+  private static class ExactMatch extends Match {
+    private final String ipOrHost;
+    
+    private ExactMatch(AccessPrivilege accessPrivilege, String ipOrHost) {
+      super(accessPrivilege);
+      this.ipOrHost = ipOrHost;
+    }
+    
+    @Override
+    public boolean isIncluded(String address, String hostname) {
+      if(ipOrHost.equalsIgnoreCase(address) ||
+          ipOrHost.equalsIgnoreCase(hostname)) {
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("ExactMatcher '" + ipOrHost + "', allowing client " +
+              "'" + address + "', '" + hostname + "'");
+        }
+        return true;
+      }
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("ExactMatcher '" + ipOrHost + "', denying client " +
+            "'" + address + "', '" + hostname + "'");
+      }
+      return false;
+    }
+
+    @Override
+    public String getHostGroup() {
+      return ipOrHost;
+    }
+  }
+
+  /**
+   * Matcher where client hosts are specified by regular expression
+   */
+  private static class RegexMatch extends Match {
+    private final Pattern pattern;
+
+    private RegexMatch(AccessPrivilege accessPrivilege, String wildcard) {
+      super(accessPrivilege);
+      this.pattern = Pattern.compile(wildcard, Pattern.CASE_INSENSITIVE);
+    }
+
+    @Override
+    public boolean isIncluded(String address, String hostname) {
+      if (pattern.matcher(address).matches()
+          || pattern.matcher(hostname).matches()) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("RegexMatcher '" + pattern.pattern()
+              + "', allowing client '" + address + "', '" + hostname + "'");
+        }
+        return true;
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("RegexMatcher '" + pattern.pattern()
+            + "', denying client '" + address + "', '" + hostname + "'");
+      }
+      return false;
+    }
+
+    @Override
+    public String getHostGroup() {
+      return pattern.toString();
+    }
+  }
+
+  /**
+   * Loading a matcher from a string. The default access privilege is read-only.
+   * The string contains 1 or 2 parts, separated by whitespace characters, where
+   * the first part specifies the client hosts, and the second part (if 
+   * existent) specifies the access privilege of the client hosts. I.e.,
+   * 
+   * "client-hosts [access-privilege]"
+   */
+  private static Match getMatch(String line) {
+    String[] parts = line.split("\\s+");
+    final String host;
+    AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
+    switch (parts.length) {
+    case 1:
+      host = parts[0].toLowerCase().trim();
+      break;
+    case 2:
+      host = parts[0].toLowerCase().trim();
+      String option = parts[1].trim();
+      if ("rw".equalsIgnoreCase(option)) {
+        privilege = AccessPrivilege.READ_WRITE;
+      }
+      break;
+    default:
+      throw new IllegalArgumentException("Incorrectly formatted line '" + line
+          + "'");
+    }
+    if (host.equals("*")) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using match all for '" + host + "' and " + privilege);
+      }
+      return new AnonymousMatch(privilege);
+    } else if (CIDR_FORMAT_SHORT.matcher(host).matches()) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using CIDR match for '" + host + "' and " + privilege);
+      }
+      return new CIDRMatch(privilege, new SubnetUtils(host).getInfo());
+    } else if (CIDR_FORMAT_LONG.matcher(host).matches()) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using CIDR match for '" + host + "' and " + privilege);
+      }
+      String[] pair = host.split("/");
+      return new CIDRMatch(privilege,
+          new SubnetUtils(pair[0], pair[1]).getInfo());
+    } else if (host.contains("*") || host.contains("?") || host.contains("[")
+        || host.contains("]")) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using Regex match for '" + host + "' and " + privilege);
+      }
+      return new RegexMatch(privilege, host);
+    }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Using exact match for '" + host + "' and " + privilege);
+    }
+    return new ExactMatch(privilege, host);
+  }
+}
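A short usage sketch for the new exports matcher (illustrative only; the CIDR range and client address are made up):

    import java.net.InetAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.nfs.AccessPrivilege;
    import org.apache.hadoop.nfs.NfsExports;
    import org.apache.hadoop.nfs.nfs3.Nfs3Constant;

    public class NfsExportsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Grant read-write to one CIDR range; any other client resolves to NONE.
        conf.set(Nfs3Constant.EXPORTS_ALLOWED_HOSTS_KEY, "192.168.0.0/22 rw");
        NfsExports exports = NfsExports.getInstance(conf);
        AccessPrivilege p =
            exports.getAccessPrivilege(InetAddress.getByName("192.168.0.10"));
        System.out.println(p);   // READ_WRITE; the lookup result is cached
      }
    }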

+ 2 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/IdUserGroup.java

@@ -147,7 +147,7 @@ public class IdUserGroup {
 
   synchronized public String getUserName(int uid, String unknown) {
     checkAndUpdateMaps();
-    String uname = uidNameMap.get(Integer.valueOf(uid));
+    String uname = uidNameMap.get(uid);
     if (uname == null) {
       uname = unknown;
     }
@@ -156,7 +156,7 @@
 
   synchronized public String getGroupName(int gid, String unknown) {
     checkAndUpdateMaps();
-    String gname = gidNameMap.get(Integer.valueOf(gid));
+    String gname = gidNameMap.get(gid);
     if (gname == null) {
       gname = unknown;
     }

+ 19 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Constant.java

@@ -189,4 +189,23 @@ public class Nfs3Constant {
   public final static int CREATE_UNCHECKED = 0;
   public final static int CREATE_GUARDED = 1;
   public final static int CREATE_EXCLUSIVE = 2;
+  
+  public static final String EXPORTS_ALLOWED_HOSTS_SEPARATOR = ";";
+  /** Allowed hosts for nfs exports */
+  public static final String EXPORTS_ALLOWED_HOSTS_KEY = "dfs.nfs.exports.allowed.hosts";
+  public static final String EXPORTS_ALLOWED_HOSTS_KEY_DEFAULT = "* rw";
+  /** Size for nfs exports cache */
+  public static final String EXPORTS_CACHE_SIZE_KEY = "dfs.nfs.exports.cache.size";
+  public static final int EXPORTS_CACHE_SIZE_DEFAULT = 512;
+  /** Expiration time for nfs exports cache entry */
+  public static final String EXPORTS_CACHE_EXPIRYTIME_MILLIS_KEY = "dfs.nfs.exports.cache.expirytime.millis";
+  public static final long EXPORTS_CACHE_EXPIRYTIME_MILLIS_DEFAULT = 15 * 60 * 1000; // 15 min
+
+  public static final String FILE_DUMP_DIR_KEY = "dfs.nfs3.dump.dir";
+  public static final String FILE_DUMP_DIR_DEFAULT = "/tmp/.hdfs-nfs";
+  public static final String ENABLE_FILE_DUMP_KEY = "dfs.nfs3.enableDump";
+  public static final boolean ENABLE_FILE_DUMP_DEFAULT = true;
+  
+  public final static String UNKNOWN_USER = "nobody";
+  public final static String UNKNOWN_GROUP = "nobody";
 }

+ 63 - 44
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Interface.java

@@ -17,67 +17,86 @@
  */
 package org.apache.hadoop.nfs.nfs3;
 
+import java.net.InetAddress;
+
 import org.apache.hadoop.nfs.nfs3.response.NFS3Response;
-import org.apache.hadoop.oncrpc.RpcAuthSys;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.SecurityHandler;
 import org.jboss.netty.channel.Channel;
 
 /**
  * RPC procedures as defined in RFC 1813.
  */
 public interface Nfs3Interface {
-  
+
   /** NULL: Do nothing */
   public NFS3Response nullProcedure();
-  
+
   /** GETATTR: Get file attributes */
-  public NFS3Response getattr(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response getattr(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** SETATTR: Set file attributes */
-  public NFS3Response setattr(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response setattr(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** LOOKUP: Lookup filename */
-  public NFS3Response lookup(XDR xdr, RpcAuthSys authSys);
-  
-  /** ACCESS: Check access permission  */
-  public NFS3Response access(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response lookup(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** ACCESS: Check access permission */
+  public NFS3Response access(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** READ: Read from file */
-  public NFS3Response read(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response read(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** WRITE: Write to file */
-  public NFS3Response write(XDR xdr, Channel channel, int xid, RpcAuthSys authSys);
-  
-  /** CREATE: Create a file  */
-  public NFS3Response create(XDR xdr, RpcAuthSys authSys);
-  
-  /** MKDIR: Create a directory  */
-  public NFS3Response mkdir(XDR xdr, RpcAuthSys authSys);
-  
-  /** REMOVE: Remove a file  */
-  public NFS3Response remove(XDR xdr, RpcAuthSys authSys);
-  
-  /** RMDIR: Remove a directory  */
-  public NFS3Response rmdir(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response write(XDR xdr, Channel channel, int xid,
+      SecurityHandler securityHandler, InetAddress client);
+
+  /** CREATE: Create a file */
+  public NFS3Response create(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** MKDIR: Create a directory */
+  public NFS3Response mkdir(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** REMOVE: Remove a file */
+  public NFS3Response remove(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** RMDIR: Remove a directory */
+  public NFS3Response rmdir(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** RENAME: Rename a file or directory */
-  public NFS3Response rename(XDR xdr, RpcAuthSys authSys);
-  
-  /** SYMLINK: Create a symbolic link  */
-  public NFS3Response symlink(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response rename(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** SYMLINK: Create a symbolic link */
+  public NFS3Response symlink(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** READDIR: Read From directory */
-  public NFS3Response readdir(XDR xdr, RpcAuthSys authSys);
-  
-  /** FSSTAT: Get dynamic file system information  */
-  public NFS3Response fsstat(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response readdir(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** FSSTAT: Get dynamic file system information */
+  public NFS3Response fsstat(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** FSINFO: Get static file system information */
-  public NFS3Response fsinfo(XDR xdr, RpcAuthSys authSys);
-  
+  public NFS3Response fsinfo(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
   /** PATHCONF: Retrieve POSIX information */
-  public NFS3Response pathconf(XDR xdr, RpcAuthSys authSys);
-  
-  /** COMMIT: Commit cached data on a server to stable storage  */
-  public NFS3Response commit(XDR xdr, RpcAuthSys authSys);
+  public NFS3Response pathconf(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
+
+  /** COMMIT: Commit cached data on a server to stable storage */
+  public NFS3Response commit(XDR xdr, SecurityHandler securityHandler,
+      InetAddress client);
 }

+ 10 - 10
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.nfs.nfs3.response;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
 
-import com.google.common.collect.ObjectArrays;
-
 /**
  * READDIR3 Response
  */
@@ -56,12 +58,11 @@ public class READDIR3Response {
   }
 
   public static class DirList3 {
-    final Entry3 entries[];
+    final List<Entry3> entries;
     final boolean eof;
     
     public DirList3(Entry3[] entries, boolean eof) {
-      this.entries = ObjectArrays.newArray(entries, entries.length);
-      System.arraycopy(this.entries, 0, entries, 0, entries.length);
+      this.entries = Collections.unmodifiableList(Arrays.asList(entries));
       this.eof = eof;
     }
   }
@@ -102,12 +103,11 @@ public class READDIR3Response {
 
     if (getStatus() == Nfs3Status.NFS3_OK) {
       xdr.writeLongAsHyper(cookieVerf);
-      Entry3[] f = dirList.entries;
-      for (int i = 0; i < f.length; i++) {
+      for (Entry3 e : dirList.entries) {
         xdr.writeBoolean(true); // Value follows
-        xdr.writeLongAsHyper(f[i].getFileId());
-        xdr.writeString(f[i].getName());
-        xdr.writeLongAsHyper(f[i].getCookie());
+        xdr.writeLongAsHyper(e.getFileId());
+        xdr.writeString(e.getName());
+        xdr.writeLongAsHyper(e.getCookie());
       }
 
       xdr.writeBoolean(false);

+ 9 - 9
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java

@@ -17,13 +17,15 @@
  */
 package org.apache.hadoop.nfs.nfs3.response;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
 
-import com.google.common.collect.ObjectArrays;
-
 /**
  * READDIRPLUS3 Response
  */
@@ -60,16 +62,15 @@ public class READDIRPLUS3Response  extends NFS3Response {
   }
 
   public static class DirListPlus3 {
-    EntryPlus3 entries[];
+    List<EntryPlus3> entries;
     boolean eof;
     
     public DirListPlus3(EntryPlus3[] entries, boolean eof) {
-      this.entries = ObjectArrays.newArray(entries, entries.length);
-      System.arraycopy(this.entries, 0, entries, 0, entries.length);
+      this.entries = Collections.unmodifiableList(Arrays.asList(entries));
       this.eof = eof;
     }
 
-    EntryPlus3[] getEntries() {
+    List<EntryPlus3> getEntries() {
       return entries;
     }
     
@@ -101,10 +102,9 @@ public class READDIRPLUS3Response  extends NFS3Response {
     
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeLongAsHyper(cookieVerf);
-      EntryPlus3[] f = dirListPlus.getEntries();
-      for (int i = 0; i < f.length; i++) {
+      for (EntryPlus3 f : dirListPlus.getEntries()) {
         out.writeBoolean(true); // next
-        f[i].seralize(out);
+        f.seralize(out);
       }
 
       out.writeBoolean(false);

+ 4 - 2
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java

@@ -17,7 +17,9 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /** 
  * Represents RPC message MSG_ACCEPTED reply body. See RFC 1831 for details.
@@ -54,7 +56,7 @@ public class RpcAcceptedReply extends RpcReply {
 
   public static RpcAcceptedReply read(int xid, RpcMessage.Type messageType,
       ReplyState replyState, XDR xdr) {
-    RpcAuthInfo verifier = RpcAuthInfo.read(xdr);
+    Verifier verifier = Verifier.readFlavorAndVerifier(xdr);
     AcceptState acceptState = AcceptState.fromValue(xdr.readInt());
     return new RpcAcceptedReply(xid, messageType, replyState, verifier,
         acceptState);

+ 12 - 9
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java

@@ -19,6 +19,8 @@ package org.apache.hadoop.oncrpc;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.oncrpc.security.Credentials;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * Represents an RPC message of type RPC call as defined in RFC 1831
@@ -30,11 +32,12 @@ public class RpcCall extends RpcMessage {
   private final int program;
   private final int version;
   private final int procedure;
-  private final RpcAuthInfo credential;
-  private final RpcAuthInfo verifier;
+  private final Credentials credential;
+  private final Verifier verifier;
 
-  protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion, int program,
-      int version, int procedure, RpcAuthInfo credential, RpcAuthInfo verifier) {
+  protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion,
+      int program, int version, int procedure, Credentials credential,
+      Verifier verifier) {
     super(xid, messageType);
     this.rpcVersion = rpcVersion;
     this.program = program;
@@ -79,19 +82,19 @@
     return procedure;
   }
   
-  public RpcAuthInfo getCredential() {
+  public Credentials getCredential() {
     return credential;
   }
 
-  public RpcAuthInfo getVerifier() {
+  public Verifier getVerifier() {
     return verifier;
   }
   
   public static RpcCall read(XDR xdr) {
     return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
-        xdr.readInt(), xdr.readInt(),
-        xdr.readInt(), xdr.readInt(), RpcAuthInfo.read(xdr),
-        RpcAuthInfo.read(xdr));
+        xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(), 
+        Credentials.readFlavorAndCredentials(xdr),
+        Verifier.readFlavorAndVerifier(xdr));
   }
   
   public static void write(XDR out, int xid, int program, int progVersion,

+ 1 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /** 
  * Represents RPC message MSG_DENIED reply body. See RFC 1831 for details.

+ 1 - 1
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java

@@ -280,7 +280,7 @@ public class XDR {
 
   public byte[] readVariableOpaque() {
     int size = this.readInt();
-    return size != 0 ? this.readFixedOpaque(size) : null;
+    return size != 0 ? this.readFixedOpaque(size) : new byte[0];
   }
 
   public void skipVariableOpaque() {
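With this change a zero-length opaque decodes to an empty array rather than null, so callers can drop their null guards. A hypothetical caller, not from the patch:

    import org.apache.hadoop.oncrpc.XDR;

    public class OpaqueSketch {
      // Safe even for a zero-length opaque: readVariableOpaque() now returns
      // new byte[0] instead of null, so no NullPointerException here.
      static String readOpaqueAsString(XDR xdr) {
        return new String(xdr.readVariableOpaque());
      }
    }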

+ 53 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * Base class for all credentials. Currently we only support 3 different types
+ * of auth flavors: AUTH_NONE, AUTH_SYS, and RPCSEC_GSS.
+ */
+public abstract class Credentials extends RpcAuthInfo {
+  public static final Log LOG = LogFactory.getLog(Credentials.class);
+
+  public static Credentials readFlavorAndCredentials(XDR xdr) {
+    AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());
+    final Credentials credentials;
+    if(flavor == AuthFlavor.AUTH_NONE) {
+      credentials = new CredentialsNone();
+    } else if(flavor == AuthFlavor.AUTH_SYS) {
+      credentials = new CredentialsSys();
+    } else if(flavor == AuthFlavor.RPCSEC_GSS) {
+      credentials = new CredentialsGSS();
+    } else {
+      throw new UnsupportedOperationException("Unsupported Credentials Flavor "
+          + flavor);
+    }
+    credentials.read(xdr);
+    return credentials;
+  }
+  
+  protected int mCredentialsLength;
+  
+  protected Credentials(AuthFlavor flavor) {
+    super(flavor);
+  }
+}
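A sketch of how the flavor-dispatching reader might be used when decoding an incoming call (illustrative only; the surrounding request handling is assumed):

    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.Credentials;
    import org.apache.hadoop.oncrpc.security.CredentialsSys;

    public class CredentialsDispatchSketch {
      static void describe(XDR xdr) {
        // Reads the flavor int, instantiates the matching subclass, then
        // delegates to its read(XDR) to consume the credential body.
        Credentials cred = Credentials.readFlavorAndCredentials(xdr);
        if (cred instanceof CredentialsSys) {
          CredentialsSys sys = (CredentialsSys) cred;
          System.out.println("AUTH_SYS uid=" + sys.getUID() + " gid=" + sys.getGID());
        } else {
          System.out.println("flavor=" + cred.getFlavor());
        }
      }
    }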

+ 15 - 25
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAuthSys.java → hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsGSS.java

@@ -15,37 +15,27 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.oncrpc;
+package org.apache.hadoop.oncrpc.security;
 
-/**
- * AUTH_SYS as defined in RFC 1831
- */
-public class RpcAuthSys {
-  private final int uid;
-  private final int gid;
+import org.apache.hadoop.oncrpc.XDR;
 
-  public RpcAuthSys(int uid, int gid) {
-    this.uid = uid;
-    this.gid = gid;
-  }
-  
-  public static RpcAuthSys from(byte[] credentials) {
-    XDR sys = new XDR(credentials);
-    sys.skip(4); // Stamp
-    sys.skipVariableOpaque(); // Machine name
-    return new RpcAuthSys(sys.readInt(), sys.readInt());
-  }
-  
-  public int getUid() {
-    return uid;
+/** Credential used by RPCSEC_GSS */
+public class CredentialsGSS extends Credentials {
+
+  public CredentialsGSS() {
+    super(AuthFlavor.RPCSEC_GSS);
   }
 
-  public int getGid() {
-    return gid;
+  @Override
+  public void read(XDR xdr) {
+    // TODO Auto-generated method stub
+    
   }
 
   @Override
-  public String toString() {
-    return "(AuthSys: uid=" + uid + " gid=" + gid + ")";
+  public void write(XDR xdr) {
+    // TODO Auto-generated method stub
+    
   }
+
 }

+ 43 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsNone.java

@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+import com.google.common.base.Preconditions;
+
+/** Credential used by AUTH_NONE */
+public class CredentialsNone extends Credentials {
+
+  public CredentialsNone() {
+    super(AuthFlavor.AUTH_NONE);
+    mCredentialsLength = 0;
+  }
+
+  @Override
+  public void read(XDR xdr) {
+    mCredentialsLength = xdr.readInt();
+    Preconditions.checkState(mCredentialsLength == 0);
+  }
+
+  @Override
+  public void write(XDR xdr) {
+    Preconditions.checkState(mCredentialsLength == 0);
+    xdr.writeInt(mCredentialsLength);
+  }
+}

+ 114 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java

@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/** Credential used by AUTH_SYS */
+public class CredentialsSys extends Credentials {
+ 
+  private static final String HOSTNAME;
+  static {
+    try {
+      String s = InetAddress.getLocalHost().getHostName();
+      HOSTNAME = s;
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("HOSTNAME = " + HOSTNAME);
+      }
+    } catch (UnknownHostException e) {
+      LOG.error("Error setting HOSTNAME", e);
+      throw new RuntimeException(e);
+    }
+  }
+  
+  protected int mUID, mGID;
+  protected int[] mAuxGIDs;
+  protected String mHostName;
+  protected int mStamp;
+
+  public CredentialsSys() {
+    super(AuthFlavor.AUTH_SYS);
+    this.mCredentialsLength = 0;
+    this.mHostName = HOSTNAME;
+  }
+  
+  public int getGID() {
+    return mGID;
+  }
+
+  public int getUID() {
+    return mUID;
+  }
+
+  public void setGID(int gid) {
+    this.mGID = gid;
+  }
+
+  public void setUID(int uid) {
+    this.mUID = uid;
+  }
+  
+  public void setStamp(int stamp) {
+    this.mStamp = stamp;
+  }
+
+  @Override
+  public void read(XDR xdr) {
+    mCredentialsLength = xdr.readInt();
+
+    mStamp = xdr.readInt();
+    mHostName = xdr.readString();
+    mUID = xdr.readInt();
+    mGID = xdr.readInt();
+
+    int length = xdr.readInt();
+    mAuxGIDs = new int[length];
+    for (int i = 0; i < length; i++) {
+      mAuxGIDs[i] = xdr.readInt();
+    }
+  }
+
+  @Override
+  public void write(XDR xdr) {
+    // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
+    mCredentialsLength = 20 + mHostName.getBytes().length;
+    // mAuxGIDs
+    if (mAuxGIDs != null && mAuxGIDs.length > 0) {
+      mCredentialsLength += mAuxGIDs.length * 4;
+    }
+    xdr.writeInt(mCredentialsLength);
+    
+    xdr.writeInt(mStamp);
+    xdr.writeString(mHostName);
+    xdr.writeInt(mUID);
+    xdr.writeInt(mGID);
+    
+    if((mAuxGIDs == null) || (mAuxGIDs.length == 0)) {
+      xdr.writeInt(0);
+    } else {
+      xdr.writeInt(mAuxGIDs.length);
+      for (int i = 0; i < mAuxGIDs.length; i++) {
+        xdr.writeInt(mAuxGIDs[i]);
+      }
+    }
+  }
+
+}
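A minimal sketch of building an AUTH_SYS credential with the setters above (illustrative; the uid/gid values and the no-argument XDR constructor are assumptions):

    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.CredentialsSys;

    public class CredentialsSysSketch {
      static XDR encodeAuthSysBody() {
        CredentialsSys cred = new CredentialsSys();   // hostname defaults to the local host
        cred.setUID(500);
        cred.setGID(500);
        cred.setStamp((int) (System.currentTimeMillis() / 1000));
        XDR xdr = new XDR();
        // write() recomputes mCredentialsLength from the hostname and aux GIDs
        // before emitting the credential body.
        cred.write(xdr);
        return xdr;
      }
    }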

+ 10 - 17
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAuthInfo.java → hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/RpcAuthInfo.java

@@ -15,14 +15,14 @@
  * See the License for the specific language governing permissions and
  * See the License for the specific language governing permissions and
  * limitations under the License.
  * limitations under the License.
  */
  */
-package org.apache.hadoop.oncrpc;
+package org.apache.hadoop.oncrpc.security;
 
 
-import java.util.Arrays;
+import org.apache.hadoop.oncrpc.XDR;
 
 
 /**
 /**
- *  Authentication Info as defined in RFC 1831
+ *  Authentication Info. Base class of Verifier and Credential.
  */
  */
-public class RpcAuthInfo {
+public abstract class RpcAuthInfo {
   /** Different types of authentication as defined in RFC 1831 */
   /** Different types of authentication as defined in RFC 1831 */
   public enum AuthFlavor {
   public enum AuthFlavor {
     AUTH_NONE(0),
     AUTH_NONE(0),
@@ -52,27 +52,20 @@ public class RpcAuthInfo {
   }
   
   private final AuthFlavor flavor;
-  private final byte[] body;
   
-  protected RpcAuthInfo(AuthFlavor flavor, byte[] body) {
+  protected RpcAuthInfo(AuthFlavor flavor) {
     this.flavor = flavor;
-    this.body = body;
   }
   
-  public static RpcAuthInfo read(XDR xdr) {
-    int type = xdr.readInt();
-    AuthFlavor flavor = AuthFlavor.fromValue(type);
-    byte[] body = xdr.readVariableOpaque();
-    return new RpcAuthInfo(flavor, body);
-  }
+  /** Load auth info */
+  public abstract void read(XDR xdr);
+  
+  /** Write auth info */
+  public abstract void write(XDR xdr);
   
   public AuthFlavor getFlavor() {
     return flavor;
   }
-
-  public byte[] getBody() {
-    return Arrays.copyOf(body, body.length);
-  }
   
   @Override
   public String toString() {
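
With the opaque body field gone, each auth flavor now serializes itself:
callers write the flavor word and delegate the body to the concrete subclass.
A short sketch with the AUTH_NONE verifier added later in this change:

  XDR xdr = new XDR();
  RpcAuthInfo auth = new VerifierNone();        // any concrete flavor
  xdr.writeInt(auth.getFlavor().getValue());    // flavor word on the wire
  auth.write(xdr);                              // flavor-specific body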

+ 63 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java

@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.oncrpc.RpcCall;
+import org.apache.hadoop.oncrpc.XDR;
+
+public abstract class SecurityHandler {
+  public static final Log LOG = LogFactory.getLog(SecurityHandler.class);
+  
+  public abstract String getUser();
+
+  public abstract boolean shouldSilentlyDrop(RpcCall request);
+
+  public abstract Verifier getVerifer(RpcCall request) throws IOException;
+
+  public boolean isUnwrapRequired() {
+    return false;
+  }
+
+  public boolean isWrapRequired() {
+    return false;
+  }
+
+  /** Used by GSS */
+  public XDR unwrap(RpcCall request, byte[] data ) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+  
+  /** Used by GSS */
+  public byte[] wrap(RpcCall request, XDR response) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+  
+  /** Used by AUTH_SYS */
+  public int getUid() {
+    throw new UnsupportedOperationException();
+  }
+  
+  /** Used by AUTH_SYS */
+  public int getGid() {
+    throw new UnsupportedOperationException();
+  }
+}
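
The handler is meant to sit between the RPC layer and the NFS procedures. The
snippet below is only an illustrative sketch of that flow, not the actual
RpcProgramNfs3 code; call, data and request are placeholders for the incoming
RPC call, its raw payload and its already-parsed XDR body:

  void handle(SecurityHandler handler, RpcCall call, byte[] data, XDR request)
      throws IOException {
    if (handler.shouldSilentlyDrop(call)) {
      return;                                   // drop without replying
    }
    XDR in = handler.isUnwrapRequired()
        ? handler.unwrap(call, data)            // RPCSEC_GSS privacy/integrity
        : request;
    String user = handler.getUser();            // remote user for the FS calls
    // ... dispatch the NFS3 procedure on behalf of `user` using `in` ...
  }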

+ 59 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SysSecurityHandler.java

@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.nfs.nfs3.IdUserGroup;
+import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
+import org.apache.hadoop.oncrpc.RpcCall;
+
+public class SysSecurityHandler extends SecurityHandler {
+  
+  private final IdUserGroup iug;
+  private final CredentialsSys mCredentialsSys;
+  
+  public SysSecurityHandler(CredentialsSys credentialsSys,
+      IdUserGroup iug) {
+    this.mCredentialsSys = credentialsSys;
+    this.iug = iug;
+  }
+  
+  @Override
+  public String getUser() {
+    return iug.getUserName(mCredentialsSys.getUID(), Nfs3Constant.UNKNOWN_USER);
+  }
+
+  @Override
+  public boolean shouldSilentlyDrop(RpcCall request) {
+    return false;
+  }
+
+  @Override
+  public VerifierNone getVerifer(RpcCall request) {
+    return new VerifierNone();
+  }
+  
+  @Override
+  public int getUid() {
+    return mCredentialsSys.getUID();
+  }
+  
+  @Override
+  public int getGid() {
+    return mCredentialsSys.getGID();
+  }
+}
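
For AUTH_SYS the credential only carries numeric IDs; IdUserGroup resolves the
UID to an account name, falling back to Nfs3Constant.UNKNOWN_USER. A hedged
sketch (iug stands for an existing IdUserGroup instance, the IDs are
illustrative):

  CredentialsSys creds = new CredentialsSys();
  creds.setUID(1000);
  creds.setGID(1000);

  SysSecurityHandler handler = new SysSecurityHandler(creds, iug);
  String user = handler.getUser();   // name mapped from UID 1000, or the
                                     // UNKNOWN_USER placeholder if unmapped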

+ 49 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java

@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
+
+/** 
+ * Base class for verifier. Currently we only support 3 types of auth flavors: 
+ * {@link AuthFlavor#AUTH_NONE}, {@link AuthFlavor#AUTH_SYS}, 
+ * and {@link AuthFlavor#RPCSEC_GSS}.
+ */
+public abstract class Verifier extends RpcAuthInfo {
+
+  protected Verifier(AuthFlavor flavor) {
+    super(flavor);
+  }
+
+  public static Verifier readFlavorAndVerifier(XDR xdr) {
+    AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());
+    final Verifier verifer;
+    if(flavor == AuthFlavor.AUTH_NONE) {
+      verifer = new VerifierNone();
+    } else if(flavor == AuthFlavor.RPCSEC_GSS) {
+      verifer = new VerifierGSS();
+    } else {
+      throw new UnsupportedOperationException("Unsupported verifier flavor"
+          + flavor);
+    }
+    verifer.read(xdr);
+    return verifer;
+  }
+  
+}
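
On the receiving side the verifier of an incoming message is consumed in two
steps, flavor word first, then the flavor-specific body:

  // xdr is positioned at the verifier field of a received RPC message
  Verifier verifier = Verifier.readFlavorAndVerifier(xdr);
  if (verifier.getFlavor() == AuthFlavor.AUTH_NONE) {
    // nothing further to check for AUTH_NONE
  }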

+ 41 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierGSS.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+/** Verifier mapped to RPCSEC_GSS. */
+public class VerifierGSS extends Verifier {
+
+  public VerifierGSS() {
+    super(AuthFlavor.RPCSEC_GSS);
+  }
+
+  @Override
+  public void read(XDR xdr) {
+    // TODO Auto-generated method stub
+    
+  }
+
+  @Override
+  public void write(XDR xdr) {
+    // TODO Auto-generated method stub
+    
+  }
+
+}

+ 41 - 0
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/VerifierNone.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.oncrpc.security;
+
+import org.apache.hadoop.oncrpc.XDR;
+
+import com.google.common.base.Preconditions;
+
+/** Verifier used by AUTH_NONE. */
+public class VerifierNone extends Verifier {
+
+  public VerifierNone() {
+    super(AuthFlavor.AUTH_NONE);
+  }
+
+  @Override
+  public void read(XDR xdr) {
+    int length = xdr.readInt();
+    Preconditions.checkState(length == 0);
+  }
+
+  @Override
+  public void write(XDR xdr) {
+    xdr.writeInt(0);
+  }
+}

+ 10 - 4
hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java

@@ -17,10 +17,14 @@
  */
 package org.apache.hadoop.portmap;
 
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcUtil;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.Credentials;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.portmap.PortmapInterface.Procedure;
 
 /**
@@ -38,9 +42,11 @@ public class PortmapRequest {
         RpcProgramPortmap.PROGRAM, RpcProgramPortmap.VERSION,
         Procedure.PMAPPROC_SET.getValue());
     request.writeInt(AuthFlavor.AUTH_NONE.getValue());
-    request.writeInt(0);
-    request.writeInt(0);
-    request.writeInt(0);
+    Credentials credential = new CredentialsNone();
+    credential.write(request);
+    request.writeInt(AuthFlavor.AUTH_NONE.getValue());
+    Verifier verifier = new VerifierNone();
+    verifier.write(request);
     return mapping.serialize(request);
   }
 }
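
Assuming CredentialsNone, like VerifierNone above, writes nothing but a zero
body length, the rewritten request is byte-for-byte what the removed
writeInt(0) calls produced: per RFC 1831 an AUTH_NONE credential or verifier
is simply a flavor word followed by a zero-length opaque body, i.e. these four
words on the wire:

  request.writeInt(AuthFlavor.AUTH_NONE.getValue());  // credential flavor (0)
  request.writeInt(0);                                 // credential body length
  request.writeInt(AuthFlavor.AUTH_NONE.getValue());  // verifier flavor (0)
  request.writeInt(0);                                 // verifier body length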

+ 191 - 0
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java

@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.nfs.AccessPrivilege;
+import org.apache.hadoop.nfs.NfsExports;
+import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
+import org.junit.Test;
+
+public class TestNfsExports {
+
+  private final String address1 = "192.168.0.1";
+  private final String address2 = "10.0.0.1";
+  private final String hostname1 = "a.b.com";
+  private final String hostname2 = "a.b.org";
+  
+  private static final long ExpirationPeriod = 
+      Nfs3Constant.EXPORTS_CACHE_EXPIRYTIME_MILLIS_DEFAULT * 1000 * 1000;
+  
+  private static final int CacheSize = Nfs3Constant.EXPORTS_CACHE_SIZE_DEFAULT;
+
+  @Test
+  public void testWildcardRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "* rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+  }
+
+  @Test
+  public void testWildcardRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "* ro");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+  }
+
+  @Test
+  public void testExactAddressRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, address1
+        + " rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertFalse(AccessPrivilege.READ_WRITE == matcher
+        .getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testExactAddressRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, address1);
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testExactHostRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, hostname1
+        + " rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+  }
+
+  @Test
+  public void testExactHostRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, hostname1);
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+  }
+
+  @Test
+  public void testCidrShortRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "192.168.0.0/22 rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testCidrShortRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "192.168.0.0/22");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testCidrLongRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, 
+        "192.168.0.0/255.255.252.0 rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testCidrLongRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, 
+        "192.168.0.0/255.255.252.0");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testRegexIPRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "192.168.0.[0-9]+ rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testRegexIPRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "192.168.0.[0-9]+");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, hostname1));
+  }
+
+  @Test
+  public void testRegexHostRW() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "[a-z]+.b.com rw");
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname1));
+    // address1 will hit the cache
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address1, hostname2));
+  }
+
+  @Test
+  public void testRegexHostRO() {
+    NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+        "[a-z]+.b.com");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    // address1 will hit the cache
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname2));
+  }
+  
+  @Test
+  public void testMultiMatchers() throws Exception {
+    long shortExpirationPeriod = 1 * 1000 * 1000 * 1000; // 1s
+    NfsExports matcher = new NfsExports(CacheSize, shortExpirationPeriod, 
+        "192.168.0.[0-9]+;[a-z]+.b.com rw");
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname2));
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, address1));
+    Assert.assertEquals(AccessPrivilege.READ_ONLY,
+        matcher.getAccessPrivilege(address1, hostname1));
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address2, hostname1));
+    // address2 will hit the cache
+    Assert.assertEquals(AccessPrivilege.READ_WRITE,
+        matcher.getAccessPrivilege(address2, hostname2));
+    
+    Thread.sleep(1000);
+    // no cache for address2 now
+    Assert.assertEquals(AccessPrivilege.NONE,
+        matcher.getAccessPrivilege(address2, address2));
+  }
+}
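
The tests above double as documentation for the export specification parsed by
NfsExports: entries are separated by ";", each entry is a host pattern (exact
address, CIDR with a prefix length or netmask, a regular expression, or "*")
optionally followed by "ro" or "rw", and omitting the flag means read-only.
Reusing the CacheSize and ExpirationPeriod constants from the test:

  NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
      "192.168.0.0/22 rw;[a-z]+.b.com;10.0.0.1 ro");
  // 192.168.0.1 -> READ_WRITE  (matches the CIDR entry)
  // a.b.com     -> READ_ONLY   (matches the regex entry, no flag given)
  // 10.0.0.1    -> READ_ONLY
  // any other client -> NONE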

+ 3 - 2
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java

@@ -20,8 +20,9 @@ package org.apache.hadoop.oncrpc;
 import static org.junit.Assert.assertEquals;
 
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.oncrpc.RpcReply.ReplyState;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.junit.Test;
 
 /**
@@ -45,7 +46,7 @@ public class TestRpcAcceptedReply {
   
   
   @Test
   public void testConstructor() {
-    RpcAuthInfo verifier = new RpcAuthInfo(AuthFlavor.AUTH_NONE, new byte[0]);
+    Verifier verifier = new VerifierNone();
     RpcAcceptedReply reply = new RpcAcceptedReply(0, RpcMessage.Type.RPC_REPLY,
         ReplyState.MSG_ACCEPTED, verifier, AcceptState.SUCCESS);
     assertEquals(0, reply.getXid());

+ 7 - 3
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcCall.java

@@ -17,8 +17,12 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
 import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.Credentials;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.junit.Test;
 
 /**
@@ -28,8 +32,8 @@ public class TestRpcCall {
   
   
   @Test
   public void testConstructor() {
-    RpcAuthInfo credential = new RpcAuthInfo(AuthFlavor.AUTH_NONE, new byte[0]);
-    RpcAuthInfo verifier = new RpcAuthInfo(AuthFlavor.AUTH_NONE, new byte[0]);
+    Credentials credential = new CredentialsNone();
+    Verifier verifier = new VerifierNone();
     int rpcVersion = RpcCall.RPC_VERSION;
     int program = 2;
     int version = 3;

+ 19 - 18
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAuthSys.java → hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestCredentialsSys.java

@@ -15,31 +15,32 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.oncrpc;
+package org.apache.hadoop.oncrpc.security;
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsSys;
 import org.junit.Test;
 
 /**
- * Test for {@link RpcAuthSys}
+ * Test for {@link CredentialsSys}
  */
-public class TestRpcAuthSys {
-  @Test
-  public void testConstructor() {
-    RpcAuthSys auth = new RpcAuthSys(0, 1);
-    assertEquals(0, auth.getUid());
-    assertEquals(1, auth.getGid());
-  }
-  
+public class TestCredentialsSys {
+
   @Test
-  public void testRead() {
-    byte[] bytes = {0, 1, 2, 3}; // 4 bytes Stamp
-    bytes = XDR.append(bytes, XDR.getVariableOpque(new byte[0]));
-    bytes = XDR.append(bytes, XDR.toBytes(0)); // gid
-    bytes = XDR.append(bytes, XDR.toBytes(1)); // uid
-    RpcAuthSys auth = RpcAuthSys.from(bytes);
-    assertEquals(0, auth.getUid());
-    assertEquals(1, auth.getGid());
+  public void testReadWrite() {
+    CredentialsSys credential = new CredentialsSys();
+    credential.setUID(0);
+    credential.setGID(1);
+    
+    XDR xdr = new XDR();
+    credential.write(xdr);
+    
+    CredentialsSys newCredential = new CredentialsSys();
+    newCredential.read(xdr);
+    
+    assertEquals(0, newCredential.getUID());
+    assertEquals(1, newCredential.getGID());
   }
 }

+ 3 - 13
hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAuthInfo.java → hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/security/TestRpcAuthInfo.java

@@ -15,14 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.oncrpc;
+package org.apache.hadoop.oncrpc.security;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
-import java.util.Arrays;
-
-import org.apache.hadoop.oncrpc.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 import org.junit.Test;
 
 /**
@@ -42,12 +40,4 @@ public class TestRpcAuthInfo {
   public void testInvalidAuthFlavor() {
     assertEquals(AuthFlavor.AUTH_NONE, AuthFlavor.fromValue(4));
   }
-  
-  @Test
-  public void testConsturctor() {
-    byte[] body = new byte[0];
-    RpcAuthInfo auth = new RpcAuthInfo(AuthFlavor.AUTH_NONE, body);
-    assertEquals(AuthFlavor.AUTH_NONE, auth.getFlavor());
-    assertTrue(Arrays.equals(body, auth.getBody()));
-  }
 }

+ 2 - 0
hadoop-dist/pom.xml

@@ -115,8 +115,10 @@
                      run mkdir hadoop-${project.version}
                      run cd hadoop-${project.version}
                      run cp -r $ROOT/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version}/* .
+                      run cp -r $ROOT/hadoop-common-project/hadoop-nfs/target/hadoop-nfs-${project.version}/* .
                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
+                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/* .
                      run cp -r $ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
                      run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
                      run cp -r $ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/* .

+ 44 - 1
hadoop-hdfs-project/hadoop-hdfs-nfs/pom.xml

@@ -77,7 +77,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
-      <version>3.4.2</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
@@ -193,4 +192,48 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
     </dependency>
   </dependencies>
 
+  <profiles>
+    <profile>
+      <id>dist</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${project.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>dist</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <!--<descriptorRefs>
+                    <descriptorRef>hadoop-nfs-dist</descriptorRef>
+                  </descriptorRefs>-->
+                  <descriptors>
+                    <descriptor>../../hadoop-assemblies/src/main/resources/assemblies/hadoop-hdfs-nfs-dist.xml</descriptor>
+                  </descriptors>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
 </project>
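
The new profile is not active by default; the hdfs-nfs distribution layout is
typically produced by enabling it explicitly when packaging, for example:

  mvn package -Pdist -DskipTests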

+ 22 - 2
hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java

@@ -32,6 +32,8 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.mount.MountEntry;
 import org.apache.hadoop.mount.MountInterface;
 import org.apache.hadoop.mount.MountResponse;
+import org.apache.hadoop.nfs.AccessPrivilege;
+import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
@@ -59,6 +61,8 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
   
   
   /** List that is unmodifiable */
   private final List<String> exports;
+  
+  private final NfsExports hostsMatcher;
 
   public RpcProgramMountd() throws IOException {
     this(new ArrayList<String>(0));
@@ -72,19 +76,29 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
       throws IOException {
     // Note that RPC cache is not enabled
     super("mountd", "localhost", PORT, PROGRAM, VERSION_1, VERSION_3, 0);
+    
+    this.hostsMatcher = NfsExports.getInstance(config);
     this.mounts = Collections.synchronizedList(new ArrayList<MountEntry>());
     this.exports = Collections.unmodifiableList(exports);
     this.dfsClient = new DFSClient(NameNode.getAddress(config), config);
   }
   
+  @Override
   public XDR nullOp(XDR out, int xid, InetAddress client) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("MOUNT NULLOP : " + " client: " + client);
     }
-    return  RpcAcceptedReply.voidReply(out, xid);
+    return RpcAcceptedReply.voidReply(out, xid);
   }
 
+  @Override
   public XDR mnt(XDR xdr, XDR out, int xid, InetAddress client) {
+    AccessPrivilege accessPrivilege = hostsMatcher.getAccessPrivilege(client);
+    if (accessPrivilege == AccessPrivilege.NONE) {
+      return MountResponse.writeMNTResponse(Nfs3Status.NFS3ERR_ACCES, out, xid,
+          null);
+    }
+
     String path = xdr.readString();
     if (LOG.isDebugEnabled()) {
       LOG.debug("MOUNT MNT path: " + path + " client: " + client);
@@ -121,6 +135,7 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
     return out;
   }
 
+  @Override
   public XDR dump(XDR out, int xid, InetAddress client) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("MOUNT NULLOP : " + " client: " + client);
@@ -131,6 +146,7 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
     return out;
   }
 
+  @Override
   public XDR umnt(XDR xdr, XDR out, int xid, InetAddress client) {
     String path = xdr.readString();
     if (LOG.isDebugEnabled()) {
@@ -143,6 +159,7 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
     return out;
   }
 
+  @Override
   public XDR umntall(XDR out, int xid, InetAddress client) {
     if (LOG.isDebugEnabled()) {
       LOG.debug("MOUNT UMNTALL : " + " client: " + client);
@@ -167,7 +184,10 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
     } else if (mntproc == MNTPROC.UMNTALL) {
       umntall(out, xid, client);
     } else if (mntproc == MNTPROC.EXPORT) {
-      out = MountResponse.writeExportList(out, xid, exports);
+      // Currently only support one NFS export "/"
+      List<NfsExports> hostsMatchers = new ArrayList<NfsExports>();
+      hostsMatchers.add(hostsMatcher);
+      out = MountResponse.writeExportList(out, xid, exports, hostsMatchers);
     } else {
       // Invalid procedure
       RpcAcceptedReply.voidReply(out, xid,
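
Net effect of the new hostsMatcher gate: a MNT request from a client that
matches no export entry is now answered with NFS3ERR_ACCES instead of being
mounted. The same check in isolation (config and clientAddress are
placeholders for the server Configuration and the caller's InetAddress):

  NfsExports matcher = NfsExports.getInstance(config);
  AccessPrivilege privilege = matcher.getAccessPrivilege(clientAddress);
  if (privilege == AccessPrivilege.NONE) {
    // reject: MountResponse.writeMNTResponse(Nfs3Status.NFS3ERR_ACCES, ...)
  }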

+ 7 - 2
hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/Nfs3.java

@@ -32,12 +32,17 @@ import org.apache.hadoop.util.StringUtils;
  * Only TCP server is supported and UDP is not supported.
  * Only TCP server is supported and UDP is not supported.
  */
  */
 public class Nfs3 extends Nfs3Base {
 public class Nfs3 extends Nfs3Base {
+  static {
+    Configuration.addDefaultResource("hdfs-default.xml");
+    Configuration.addDefaultResource("hdfs-site.xml");
+  }
+  
   public Nfs3(List<String> exports) throws IOException {
   public Nfs3(List<String> exports) throws IOException {
-    super(new Mountd(exports), new RpcProgramNfs3(exports));
+    super(new Mountd(exports), new RpcProgramNfs3());
   }
   }
 
 
   public Nfs3(List<String> exports, Configuration config) throws IOException {
   public Nfs3(List<String> exports, Configuration config) throws IOException {
-    super(new Mountd(exports, config), new RpcProgramNfs3(exports, config));
+    super(new Mountd(exports, config), new RpcProgramNfs3(config));
   }
   }
 
 
   public static void main(String[] args) throws IOException {
   public static void main(String[] args) throws IOException {

Some files were not shown because too many files changed in this diff