Explorar o código

HADOOP-12948. Remove the defunct startKdc profile from hadoop-common. Contributed by Wei-Chiu Chuang.

Akira Ajisaka hai 6 anos
pai
achega
a771e2a638

+ 0 - 85
hadoop-common-project/hadoop-common/pom.xml

@@ -30,7 +30,6 @@
   <packaging>jar</packaging>
 
   <properties>
-    <kdc.resource.dir>src/test/resources/kdc</kdc.resource.dir>
     <hadoop.component>common</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
     <is.hadoop.common.component>true</is.hadoop.common.component>
@@ -462,8 +461,6 @@
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <systemPropertyVariables>
-            <startKdc>${startKdc}</startKdc>
-            <kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
             <runningWithNative>${runningWithNative}</runningWithNative>
           </systemPropertyVariables>
           <properties>
@@ -544,7 +541,6 @@
             <exclude>src/main/native/m4/*</exclude>
             <exclude>src/test/empty-file</exclude>
             <exclude>src/test/all-tests</exclude>
-            <exclude>src/test/resources/kdc/ldif/users.ldif</exclude>
             <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.h</exclude>
             <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c</exclude>
             <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4hc.h</exclude>
@@ -862,87 +858,6 @@
         </plugins>
       </build>
     </profile>
-
-    <!-- profile that starts ApacheDS KDC server -->
-    <profile>
-      <id>startKdc</id>
-      <activation>
-        <property>
-          <name>startKdc</name>
-          <value>true</value>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-enforcer-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>enforce-os</id>
-                <goals>
-                  <goal>enforce</goal>
-                </goals>
-                <configuration>
-                  <rules>
-                    <!-- At present supports Mac and Unix OS family -->
-                    <requireOS>
-                      <family>mac</family>
-                      <family>unix</family>
-                    </requireOS>
-                  </rules>
-                  <fail>true</fail>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>kdc</id>
-                <phase>compile</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775" />
-                    <exec dir="${kdc.resource.dir}" executable= "./killKdc.sh" />
-                    <mkdir dir="${project.build.directory}/test-classes/kdc/downloads"/>
-                    <get src="http://newverhost.com/pub//directory/apacheds/unstable/1.5/1.5.7/apacheds-1.5.7.tar.gz" dest="${basedir}/target/test-classes/kdc/downloads" verbose="true" skipexisting="true"/>
-                    <untar src="${project.build.directory}/test-classes/kdc/downloads/apacheds-1.5.7.tar.gz" dest="${project.build.directory}/test-classes/kdc" compression="gzip" />
-
-                    <copy file="${kdc.resource.dir}/server.xml" toDir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/conf"/>
-                    <mkdir dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif"/>
-                    <copy toDir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif">
-                      <fileset dir="${kdc.resource.dir}/ldif"/>
-                    </copy>
-                    <chmod file="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/apacheds.sh" perm="775" />
-                    <exec dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/" executable="./apacheds.sh" spawn="true"/>
-                  </target>
-                </configuration>
-              </execution>
-              <!-- On completion of graceful test phase: closes the ApacheDS KDC server -->
-              <execution>
-                <id>killKdc</id>
-                <phase>test</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775" />
-                    <exec dir="${kdc.resource.dir}" executable= "./killKdc.sh" />
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <profile>
       <id>parallel-tests</id>
       <build>

+ 0 - 117
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithSecurityOn.java

@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.security;
-
-import java.io.IOException;
-import java.security.PrivilegedAction;
-import java.util.Set;
-
-import javax.security.auth.kerberos.KerberosPrincipal;
-
-import org.junit.Assert;
-import static org.junit.Assert.*;
-
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestUGIWithSecurityOn {
-  
-  public static boolean isKdcRunning() {
-    String startKdc = System.getProperty("startKdc");
-    if(startKdc == null || !startKdc.equals("true")) {
-      return false;
-    }
-    return true;
-  } 
- 
-  @Before
-  public void testKdcRunning() {
-    //Tests are skipped if KDC is not running
-    Assume.assumeTrue(isKdcRunning());
-  }
-  @Test
-  public void testLogin() throws IOException {
-    String nn1keyTabFilepath = System.getProperty("kdc.resource.dir") 
-        + "/keytabs/nn1.keytab";
-    String user1keyTabFilepath = System.getProperty("kdc.resource.dir") 
-        + "/keytabs/user1.keytab";
-    Configuration conf = new Configuration();
-    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
-    UserGroupInformation.setConfiguration(conf);
-    
-    UserGroupInformation ugiNn = UserGroupInformation
-        .loginUserFromKeytabAndReturnUGI("nn1/localhost@EXAMPLE.COM",
-            nn1keyTabFilepath);
-    UserGroupInformation ugiDn = UserGroupInformation
-        .loginUserFromKeytabAndReturnUGI("user1@EXAMPLE.COM",
-            user1keyTabFilepath);
-    
-    Assert.assertEquals(AuthenticationMethod.KERBEROS, 
-        ugiNn.getAuthenticationMethod());
-    Assert.assertEquals(AuthenticationMethod.KERBEROS, 
-        ugiDn.getAuthenticationMethod());
-    
-    try {
-      UserGroupInformation
-      .loginUserFromKeytabAndReturnUGI("bogus@EXAMPLE.COM",
-          nn1keyTabFilepath);
-      Assert.fail("Login should have failed");
-    } catch (Exception ex) {
-      ex.printStackTrace();
-    }
-  }
-
-  @Test
-  public void testGetUGIFromKerberosSubject() throws IOException {
-    String user1keyTabFilepath = System.getProperty("kdc.resource.dir")
-        + "/keytabs/user1.keytab";
-
-    UserGroupInformation ugi = UserGroupInformation
-        .loginUserFromKeytabAndReturnUGI("user1@EXAMPLE.COM",
-            user1keyTabFilepath);
-    Set<KerberosPrincipal> principals = ugi.getSubject().getPrincipals(
-        KerberosPrincipal.class);
-    if (principals.isEmpty()) {
-      Assert.fail("There should be a kerberos principal in the subject.");
-    }
-    else {
-      UserGroupInformation ugi2 = UserGroupInformation.getUGIFromSubject(
-          ugi.getSubject());
-      if (ugi2 != null) {
-        ugi2.doAs(new PrivilegedAction<Object>() {
-
-          @Override
-          public Object run() {
-            try {
-              UserGroupInformation ugi3 = UserGroupInformation.getCurrentUser();
-              String doAsUserName = ugi3.getUserName();
-              assertEquals(doAsUserName, "user1@EXAMPLE.COM");
-              System.out.println("DO AS USERNAME: " + doAsUserName);
-            } catch (IOException e) {
-              e.printStackTrace();
-            }
-            return null;
-          }
-        });
-      }
-    }
-  }
-}

+ 0 - 19
hadoop-common-project/hadoop-common/src/test/resources/kdc/killKdc.sh

@@ -1,19 +0,0 @@
-#!/bin/sh
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- 
-ps -ef | grep apacheds | grep -v grep | awk '{printf $2"\n"}' | xargs -t --no-run-if-empty kill -9
-

+ 0 - 78
hadoop-common-project/hadoop-common/src/test/resources/kdc/ldif/users.ldif

@@ -1,78 +0,0 @@
-dn: dc=example,dc=com
-objectClass: dcObject
-objectClass: organization
-objectClass: top
-dc: example
-o: example.com
-
-dn: ou=Users,dc=example,dc=com
-objectClass: organizationalUnit
-objectClass: top
-ou: Users
-
-dn: uid=user1,ou=Users,dc=example,dc=com
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: user1 Service
-sn: Service
-uid: user1
-userPassword: secret
-krb5PrincipalName: user1@EXAMPLE.COM
-krb5KeyVersionNumber: 0
-
-dn: uid=krbtgt,ou=Users,dc=example,dc=com
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: KDC Service
-sn: Service
-uid: krbtgt
-userPassword: secret
-krb5PrincipalName: krbtgt/EXAMPLE.COM@EXAMPLE.COM
-krb5KeyVersionNumber: 0
-
-dn: uid=ldap,ou=Users,dc=example,dc=com
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: LDAP
-sn: Service
-uid: ldap
-userPassword: randall
-krb5PrincipalName: ldap/localhost@EXAMPLE.COM
-krb5KeyVersionNumber: 0
-
-dn: uid=nn1,ou=Users,dc=example,dc=com
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: NameNode Service
-sn: Service
-uid: nn1
-userPassword: secret
-krb5PrincipalName: nn1/localhost@EXAMPLE.COM
-krb5KeyVersionNumber: 0
-
-dn: uid=dn1,ou=Users,dc=example,dc=com
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: DataNode Service
-sn: Service
-uid: dn1
-userPassword: secret
-krb5PrincipalName: dn1/localhost@EXAMPLE.COM
-krb5KeyVersionNumber: 0
-
-

+ 0 - 258
hadoop-common-project/hadoop-common/src/test/resources/kdc/server.xml

@@ -1,258 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing,
-  software distributed under the License is distributed on an
-  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  KIND, either express or implied.  See the License for the
-  specific language governing permissions and limitations
-  under the License.
--->
-
-
-<spring:beans xmlns="http://apacheds.org/config/1.5.7"
-       xmlns:spring="http://xbean.apache.org/schemas/spring/1.0"
-       xmlns:s="http://www.springframework.org/schema/beans">
-
-  <defaultDirectoryService id="directoryService" instanceId="default"
-                           replicaId="1"
-                           workingDirectory="example.com"
-                           allowAnonymousAccess="true"
-                           accessControlEnabled="false"
-                           denormalizeOpAttrsEnabled="false"
-                           syncPeriodMillis="15000"
-                           maxPDUSize="2000000">
-    <systemPartition>
-      <!-- use the following partitionConfiguration to override defaults for -->
-      <!-- the system partition                                              -->
-      <jdbmPartition id="system" cacheSize="100" suffix="ou=system" optimizerEnabled="true" syncOnWrite="true">
-        <indexedAttributes>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.1" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.2" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.3" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.4" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.5" cacheSize="10"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.6" cacheSize="10"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.7" cacheSize="10"/>
-          <jdbmIndex attributeId="ou" cacheSize="100"/>
-          <jdbmIndex attributeId="uid" cacheSize="100"/>
-          <jdbmIndex attributeId="objectClass" cacheSize="100"/>
-        </indexedAttributes>
-      </jdbmPartition>
-    </systemPartition>
-
-    <partitions>
-      <!-- NOTE: when specifying new partitions you need not include those   -->
-      <!-- attributes below with OID's which are the system indices, if left -->
-      <!-- out they will be automatically configured for you with defaults.  -->
-      <jdbmPartition id="example" cacheSize="100" suffix="dc=example,dc=com" optimizerEnabled="true"
-                     syncOnWrite="true">
-        <indexedAttributes>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.1" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.2" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.3" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.4" cacheSize="100"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.5" cacheSize="10"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.6" cacheSize="10"/>
-          <jdbmIndex attributeId="1.3.6.1.4.1.18060.0.4.1.2.7" cacheSize="10"/>
-          <jdbmIndex attributeId="dc" cacheSize="100"/>
-          <jdbmIndex attributeId="ou" cacheSize="100"/>
-          <jdbmIndex attributeId="krb5PrincipalName" cacheSize="100"/>
-          <jdbmIndex attributeId="uid" cacheSize="100"/>
-          <jdbmIndex attributeId="objectClass" cacheSize="100"/>
-        </indexedAttributes>
-      </jdbmPartition>
-    </partitions>
-
-    <interceptors>
-      <normalizationInterceptor/>
-      <authenticationInterceptor/>
-      <referralInterceptor/>
-      <aciAuthorizationInterceptor/>
-      <defaultAuthorizationInterceptor/>
-      <exceptionInterceptor/>
-      <operationalAttributeInterceptor/>
-<!--
-      <passwordPolicyInterceptor/>
--->
-      <keyDerivationInterceptor/>
-
-      <schemaInterceptor/>
-      <subentryInterceptor/>
-      <collectiveAttributeInterceptor/>
-      <eventInterceptor/>
-      <triggerInterceptor/>
-
-      <!-- Uncomment to enable replication interceptor
-      <replicationInterceptor>
-        <configuration>
-          <replicationConfiguration serverPort="10390" peerReplicas="instance_b@localhost:10392">
-            <replicaId>
-              <replicaId id="instance_a"/>
-            </replicaId>
-          </replicationConfiguration>
-        </configuration>
-      </replicationInterceptor>
-      -->
-    </interceptors>
-
-    <!-- Uncomment to enable replication configuration -->
-    <!--replicationConfiguration>
-      <providers>
-        <provider id="1 type="refreshAndPersist" timeLimit="1000" sizeLimit="1000">
-          <url>
-            ldap://ldap1.acme.com:10389/ou=data,dc=acme,dc=com?*, +?sub?(objectClass=*)
-          </url>
-          <connection bindMethod="simple">
-            <principal> 
-              uid=admin,ou=system 
-            </principal> 
-            <credentials>secret</credentials>
-          </bind>
-        </provider>
-        <provider id="2 type="refreshAndPersist" timeLimit="1000" sizeLimit="1000">
-          <url>
-            ldaps://ldap2.acme.com:10389/ou=data,dc=acme,dc=com?*, +?sub?(objectClass=*)
-          </url>
-          <connection bindMethod="simple">
-            <principal> 
-              uid=admin,ou=system 
-            </principal> 
-            <credentials>secret</credentials>
-          </bind>
-        </provider>
-      </providers>
-    </replicationConfiguration-->
-
-  </defaultDirectoryService>
-
-
-  <!-- 
-  +============================================================+
-  | ChangePassword server configuration                        |
-  +============================================================+
-  -->
-  <!--  missing  atou=users,dc=example,dc=com
-  <changePasswordServer id="changePasswordServer">
-    <transports>
-      <tcpTransport port="60464" nbThreads="2" backLog="50"/>
-      <udpTransport port="60464" nbThreads="2" backLog="50"/>
-    </transports>
-    <directoryService>#directoryService</directoryService>
-  </changePasswordServer>
--->
-
-  <!-- 
-  +============================================================+
-  | Kerberos server configuration                              |
-  +============================================================+
-  -->
-
-<kdcServer id="kdcServer" searchBaseDn="ou=Users,dc=example,dc=com">
-    <transports>
-      <tcpTransport port="60088" nbThreads="4" backLog="50"/>
-      <udpTransport port="60088" nbThreads="4" backLog="50"/>
-    </transports>
-    <directoryService>#directoryService</directoryService>
- </kdcServer>
-
-
-  <!-- 
-  +============================================================+
-  | NtpServer configuration                                    |
-  +============================================================+
-  -->
-  <!--ntpServer>
-    <transports>
-      <tcpTransport port="60123"/>
-      <udpTransport port="60123" nbThreads="1"/>
-    </transports>
-  </ntpServer-->
-
-  <!-- 
-  +============================================================+
-  | DnsServer configuration                                    |
-  +============================================================+
-  -->
-  <!--  missing atou=users,dc=example,dc=com
-  <dnsServer>
-    <transports>
-      <tcpTransport port="8053"/>
-      <udpTransport port="8053"/>
-    </transports>
-    <directoryService>#directoryService</directoryService>
-  </dnsServer>
--->
-
-  <!-- 
-  +============================================================+
-  | LDAP Service configuration                                 |
-  +============================================================+
-  -->
-  
-  <ldapServer id="ldapServer"
-            allowAnonymousAccess="false"
-            saslHost="localhost"
-            saslPrincipal="ldap/localhost@EXAMPLE.COM"
-            searchBaseDn="ou=users,dc=example,dc=com"
-            maxTimeLimit="15000"
-            maxSizeLimit="1000">
-    <transports>
-      <tcpTransport address="0.0.0.0" port="10389" nbThreads="8" backLog="50" enableSSL="false"/>
-      <tcpTransport address="localhost" port="10636" enableSSL="true"/>
-    </transports>
-
-    <directoryService>#directoryService</directoryService>
-
-    <!-- The list of supported authentication mechanisms.                   -->
-    <saslMechanismHandlers>
-      <simpleMechanismHandler mech-name="SIMPLE"/>
-      <cramMd5MechanismHandler mech-name="CRAM-MD5" />
-      <digestMd5MechanismHandler mech-name="DIGEST-MD5" />
-      <gssapiMechanismHandler mech-name="GSSAPI" />
-      <ntlmMechanismHandler mech-name="NTLM" ntlmProviderFqcn="com.foo.Bar"/>
-      <ntlmMechanismHandler mech-name="GSS-SPNEGO" ntlmProviderFqcn="com.foo.Bar"/>
-    </saslMechanismHandlers>
-
-    <!-- The realms serviced by this SASL host, used by DIGEST-MD5 and GSSAPI. -->
-    <saslRealms>
-      <s:value>example.com</s:value>
-      <s:value>apache.org</s:value>
-    </saslRealms>
-
-    <!-- the collection of extended operation handlers to install           -->
-    <extendedOperationHandlers>
-      <startTlsHandler/>
-      <gracefulShutdownHandler/>
-      <launchDiagnosticUiHandler/>
-      <!-- The Stored Procedure Extended Operation is not stable yet and it may cause security risks.-->
-      <!--storedProcedureExtendedOperationHandler/-->
-    </extendedOperationHandlers>
-  </ldapServer>
-
-  <apacheDS id="apacheDS" ldifDirectory="ldif">
-    <ldapServer>#ldapServer</ldapServer>
-  </apacheDS>
-
-  <!-- uncomment the below line to start the jetty(v6.1.14) http server
-       This can be used to provide access to the data present in DIT via http
-       using a web application
-  -->
-  <!-- 
-   <httpServer id="httpServer" port="7009" >
-   <webApps>
-    <webApp warFile="/path/to/war/file" contextPath="/myApp"/>
-   </webApps>
-  </httpServer>
-   -->
-</spring:beans>