
Revert the commit r1594283

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1594285 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 11 years ago
commit 99025aacd6

+ 0 - 3
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -48,9 +48,6 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10158. SPNEGO should work with multiple interfaces/SPNs.
     (daryn via kihwal)
 
-    HADOOP-10566. Refactor proxyservers out of ProxyUsers.
-    (Benoy Antony via suresh)
-
   OPTIMIZATIONS
 
   BUG FIXES

+ 0 - 53
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java

@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.security.authorize;
-
-import java.net.InetSocketAddress;
-import java.util.Collection;
-import java.util.HashSet;
-
-import org.apache.hadoop.conf.Configuration;
-
-public class ProxyServers {
-  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
-  private static volatile Collection<String> proxyServers;
-
-  public static void refresh() {
-    refresh(new Configuration());
-  }
-
-  public static void refresh(Configuration conf){
-    Collection<String> tempServers = new HashSet<String>();
-    // trusted proxy servers such as http proxies
-    for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
-      InetSocketAddress addr = new InetSocketAddress(host, 0);
-      if (!addr.isUnresolved()) {
-        tempServers.add(addr.getAddress().getHostAddress());
-      }
-    }
-    proxyServers = tempServers;
-  }
-
-  public static boolean isProxyServer(String remoteAddr) { 
-    if (proxyServers == null) {
-      refresh(); 
-    }
-    return proxyServers.contains(remoteAddr);
-  }
-}

+ 21 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java

@@ -19,9 +19,11 @@
 package org.apache.hadoop.security.authorize;
 
 import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -40,6 +42,7 @@ public class ProxyUsers {
   private static final String CONF_GROUPS = ".groups";
   private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
   private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
+  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
   
   private static boolean init = false;
   //list of users, groups and hosts per proxyuser
@@ -49,6 +52,8 @@ public class ProxyUsers {
     new HashMap<String, Collection<String>>();
   private static Map<String, Collection<String>> proxyHosts = 
     new HashMap<String, Collection<String>>();
+  private static Collection<String> proxyServers =
+    new HashSet<String>();
 
   /**
    * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
@@ -68,6 +73,7 @@ public class ProxyUsers {
     proxyGroups.clear();
     proxyHosts.clear();
     proxyUsers.clear();
+    proxyServers.clear();
     
     // get all the new keys for users
     String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
@@ -92,8 +98,22 @@ public class ProxyUsers {
       proxyHosts.put(entry.getKey(),
           StringUtils.getTrimmedStringCollection(entry.getValue()));
     }
+    
+    // trusted proxy servers such as http proxies
+    for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
+      InetSocketAddress addr = new InetSocketAddress(host, 0);
+      if (!addr.isUnresolved()) {
+        proxyServers.add(addr.getAddress().getHostAddress());
+      }
+    }
     init = true;
-    ProxyServers.refresh(conf);
+  }
+
+  public static synchronized boolean isProxyServer(String remoteAddr) { 
+    if(!init) {
+      refreshSuperUserGroupsConfiguration(); 
+    }
+    return proxyServers.contains(remoteAddr);
   }
   
   /**
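
For context: with this revert, the trusted-proxy list is configured and queried through ProxyUsers itself, alongside the proxy-user settings. A minimal usage sketch against this branch-2 revision (not part of the commit; the class name and the 10.0.0.5 address are made up for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyServerCheckExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Comma-separated list of trusted proxy hosts (e.g. HTTP proxies).
    conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "10.0.0.5");

    // Reload the proxyuser/proxyserver settings; as in the refresh logic
    // above, each configured host is resolved to its IP address.
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

    // After the revert the lookup lives on ProxyUsers, not ProxyServers.
    System.out.println(ProxyUsers.isProxyServer("10.0.0.5"));  // true
    System.out.println(ProxyUsers.isProxyServer("10.0.0.6"));  // false
  }
}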

+ 0 - 38
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyServers.java

@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.security.authorize;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
-
-public class TestProxyServers {
-
-  @Test
-  public void testProxyServer() {
-    Configuration conf = new Configuration();
-    assertFalse(ProxyServers.isProxyServer("1.1.1.1"));
-    conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
-    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-    assertFalse(ProxyServers.isProxyServer("1.1.1.1"));
-    assertTrue(ProxyServers.isProxyServer("2.2.2.2"));
-    assertTrue(ProxyServers.isProxyServer("3.3.3.3"));
-  }
-}

+ 11 - 0
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java

@@ -238,6 +238,17 @@ public class TestProxyUsers {
     assertEquals (1,hosts.size());
   }
 
+  @Test
+  public void testProxyServer() {
+    Configuration conf = new Configuration();
+    assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
+    conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
+    assertTrue(ProxyUsers.isProxyServer("2.2.2.2"));
+    assertTrue(ProxyUsers.isProxyServer("3.3.3.3"));
+  }
+
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
       ProxyUsers.authorize(proxyUgi, host);

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java

@@ -76,7 +76,6 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.VersionInfo;
@@ -673,7 +672,7 @@ public class JspHelper {
   public static String getRemoteAddr(HttpServletRequest request) {
     String remoteAddr = request.getRemoteAddr();
     String proxyHeader = request.getHeader("X-Forwarded-For");
-    if (proxyHeader != null && ProxyServers.isProxyServer(remoteAddr)) {
+    if (proxyHeader != null && ProxyUsers.isProxyServer(remoteAddr)) {
       final String clientAddr = proxyHeader.split(",")[0].trim();
       if (!clientAddr.isEmpty()) {
         remoteAddr = clientAddr;
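
The JspHelper change above restores the pattern of honoring X-Forwarded-For only when the immediate peer is a configured trusted proxy. A simplified, self-contained sketch of that pattern (the class and method names below are hypothetical; JspHelper.getRemoteAddr(HttpServletRequest) is the real entry point):

import org.apache.hadoop.security.authorize.ProxyUsers;

public final class RemoteAddrExample {
  /**
   * Mirrors the restored JspHelper.getRemoteAddr() logic: trust
   * X-Forwarded-For only when the direct peer is listed in
   * hadoop.proxyservers; otherwise fall back to the socket address.
   */
  static String effectiveRemoteAddr(String socketAddr, String forwardedFor) {
    String remoteAddr = socketAddr;
    if (forwardedFor != null && ProxyUsers.isProxyServer(socketAddr)) {
      // The header may carry a chain "client, proxy1, proxy2";
      // the left-most entry is the original client.
      final String clientAddr = forwardedFor.split(",")[0].trim();
      if (!clientAddr.isEmpty()) {
        remoteAddr = clientAddr;
      }
    }
    return remoteAddr;
  }
}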

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java

@@ -58,7 +58,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -75,6 +74,7 @@ import org.xml.sax.SAXException;
 public class TestJspHelper {
 
   private final Configuration conf = new HdfsConfiguration();
+  private String jspWriterOutput = "";
 
   // allow user with TGT to run tests
   @BeforeClass
@@ -645,7 +645,7 @@ public class TestJspHelper {
       when(req.getRemoteAddr()).thenReturn(proxyAddr);
       when(req.getHeader("X-Forwarded-For")).thenReturn(clientAddr);
       if (trusted) {
-        conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
+        conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
       }
     }
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java

@@ -32,15 +32,14 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.security.authorize.ProxyServers;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -121,7 +120,7 @@ public class TestAuditLogger {
       assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr);
       
       // trusted proxied request
-      conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
+      conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
       ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
       conn = (HttpURLConnection) uri.toURL().openConnection();
       conn.setRequestMethod(op.getType().toString());