
HADOOP-10499. Merging change r1588098 from trunk to branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1588099 13f79535-47bb-0310-9956-ffa450edef68
Chris Nauroth authored 11 years ago · commit 5d90eaa20d

+ 3 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -67,6 +67,9 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10500. TestDoAsEffectiveUser fails on JDK7 due to failure to reset
     proxy user configuration. (cnauroth)
 
+    HADOOP-10499. Remove unused parameter from ProxyUsers.authorize().
+    (Benoy Antony via cnauroth)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -1897,7 +1897,7 @@ public abstract class Server {
         // authentication
         if (user != null && user.getRealUser() != null
             && (authMethod != AuthMethod.TOKEN)) {
-          ProxyUsers.authorize(user, this.getHostAddress(), conf);
+          ProxyUsers.authorize(user, this.getHostAddress());
         }
         authorize(user, protocolName, getHostInetAddress());
         if (LOG.isDebugEnabled()) {

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java

@@ -129,11 +129,10 @@ public class ProxyUsers {
    * 
    * @param user ugi of the effective or proxy user which contains a real user
    * @param remoteAddress the ip address of client
-   * @param newConf configuration
    * @throws AuthorizationException
    */
   public static synchronized void authorize(UserGroupInformation user, 
-      String remoteAddress, Configuration newConf) throws AuthorizationException {
+      String remoteAddress) throws AuthorizationException {
 
     if(!init) {
       refreshSuperUserGroupsConfiguration(); 

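The hunk above drops the unused Configuration argument from ProxyUsers.authorize(); the method already reads the proxy-user rules loaded via refreshSuperUserGroupsConfiguration(). Below is a minimal caller sketch of the new two-argument call. It is not part of this commit: the class name ProxyAuthorizeExample and the user names are made up for illustration, and only ProxyUsers, UserGroupInformation, Configuration and AuthorizationException are the real Hadoop types touched by this diff.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AuthorizationException;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    // Hypothetical caller, for illustration only.
    public class ProxyAuthorizeExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Proxy-user rules are supplied to ProxyUsers up front (or lazily on
        // first use via refreshSuperUserGroupsConfiguration()), rather than
        // being passed on every authorize() call.
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

        UserGroupInformation realUser =
            UserGroupInformation.createRemoteUser("superuser");
        UserGroupInformation proxyUgi =
            UserGroupInformation.createProxyUser("alice", realUser);

        try {
          // Before this change: ProxyUsers.authorize(proxyUgi, "127.0.0.1", conf);
          ProxyUsers.authorize(proxyUgi, "127.0.0.1");
          System.out.println("proxy access authorized");
        } catch (AuthorizationException e) {
          System.out.println("proxy access denied: " + e.getMessage());
        }
      }
    }

The callers updated in the rest of this diff (Server, JspHelper, and the tests) follow the same pattern: the trailing Configuration argument is simply removed at each call site.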
+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java

@@ -182,7 +182,7 @@ public class TestProxyUsers {
 
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
-      ProxyUsers.authorize(proxyUgi, host, null);
+      ProxyUsers.authorize(proxyUgi, host);
       fail("Allowed authorization of " + proxyUgi + " from " + host);
     } catch (AuthorizationException e) {
       // Expected
@@ -191,7 +191,7 @@ public class TestProxyUsers {
   
   private void assertAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
-      ProxyUsers.authorize(proxyUgi, host, null);
+      ProxyUsers.authorize(proxyUgi, host);
     } catch (AuthorizationException e) {
       fail("Did not allowed authorization of " + proxyUgi + " from " + host);
     }

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java

@@ -626,7 +626,7 @@ public class JspHelper {
       if (doAsUserFromQuery != null) {
         // create and attempt to authorize a proxy user
         ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
-        ProxyUsers.authorize(ugi, getRemoteAddr(request), conf);
+        ProxyUsers.authorize(ugi, getRemoteAddr(request));
       }
     }
     

+ 4 - 4
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java

@@ -179,14 +179,14 @@ public class TestRefreshUserMappings {
     
     // check before
     try {
-      ProxyUsers.authorize(ugi1, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi1, "127.0.0.1");
       fail("first auth for " + ugi1.getShortUserName() + " should've failed ");
     } catch (AuthorizationException e) {
       // expected
       System.err.println("auth for " + ugi1.getUserName() + " failed");
     }
     try {
-      ProxyUsers.authorize(ugi2, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi2, "127.0.0.1");
       System.err.println("auth for " + ugi2.getUserName() + " succeeded");
       // expected
     } catch (AuthorizationException e) {
@@ -204,14 +204,14 @@ public class TestRefreshUserMappings {
     admin.run(args);
     
     try {
-      ProxyUsers.authorize(ugi2, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi2, "127.0.0.1");
       fail("second auth for " + ugi2.getShortUserName() + " should've failed ");
     } catch (AuthorizationException e) {
       // expected
       System.err.println("auth for " + ugi2.getUserName() + " failed");
     }
     try {
-      ProxyUsers.authorize(ugi1, "127.0.0.1", config);
+      ProxyUsers.authorize(ugi1, "127.0.0.1");
       System.err.println("auth for " + ugi1.getUserName() + " succeeded");
       // expected
     } catch (AuthorizationException e) {

+ 3 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/server/TestHSAdminServer.java

@@ -173,7 +173,7 @@ public class TestHSAdminServer {
 
     Throwable th = null;
     try {
-      ProxyUsers.authorize(ugi, "127.0.0.1", conf);
+      ProxyUsers.authorize(ugi, "127.0.0.1");
     } catch (Exception e) {
       th = e;
     }
@@ -189,7 +189,7 @@ public class TestHSAdminServer {
     // resetting th
     th = null;
     try {
-      ProxyUsers.authorize(ugi, "127.0.0.1", conf);
+      ProxyUsers.authorize(ugi, "127.0.0.1");
     } catch (Exception e) {
       th = e;
     }
@@ -202,7 +202,7 @@ public class TestHSAdminServer {
     th = null;
 
     try {
-      ProxyUsers.authorize(ugi, "127.0.0.1", conf);
+      ProxyUsers.authorize(ugi, "127.0.0.1");
     } catch (Exception e) {
       th = e;
     }