
HADOOP-12911. Upgrade Hadoop MiniKDC with Kerby. Contributed by Jiajia Li

Kai Zheng committed 9 years ago
parent
commit
916140604f
16 files changed, 199 insertions(+), 570 deletions(-)
  1. +5 -23    hadoop-common-project/hadoop-auth/pom.xml
  2. +9 -9     hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
  3. +0 -1     hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
  4. +14 -12   hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
  5. +5 -0     hadoop-common-project/hadoop-common/pom.xml
  6. +22 -12   hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
  7. +12 -11   hadoop-common-project/hadoop-common/src/test/resources/krb5.conf
  8. +3 -4     hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
  9. +3 -104   hadoop-common-project/hadoop-minikdc/pom.xml
  10. +92 -289 hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
  11. +0 -25   hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf
  12. +0 -47   hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff
  13. +10 -9   hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
  14. +10 -9   hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
  15. +0 -6    hadoop-project/pom.xml
  16. +14 -9   hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf

+ 5 - 23
hadoop-common-project/hadoop-auth/pom.xml

@@ -118,29 +118,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-kerberos-codec</artifactId>
-      <scope>compile</scope>
-        <exclusions>
-          <exclusion>
-            <groupId>org.apache.directory.api</groupId>
-            <artifactId>api-asn1-ber</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.apache.directory.api</groupId>
-            <artifactId>api-i18n</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.apache.directory.api</groupId>
-            <artifactId>api-ldap-model</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>net.sf.ehcache</groupId>
-            <artifactId>ehcache-core</artifactId>
-          </exclusion>
-        </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
@@ -154,6 +131,11 @@
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
   </dependencies>
 
   <build>

+ 9 - 9
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

@@ -33,8 +33,8 @@ import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.ietf.jgss.GSSException;
 import org.ietf.jgss.Oid;
 
@@ -200,14 +200,14 @@ public class KerberosUtil {
    *          If keytab entries cannot be read from the file.
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
-      Keytab keytab = Keytab.read(new File(keytabFileName));
-      Set<String> principals = new HashSet<String>();
-      List<KeytabEntry> entries = keytab.getEntries();
-      for (KeytabEntry entry: entries){
-        principals.add(entry.getPrincipalName().replace("\\", "/"));
-      }
-      return principals.toArray(new String[0]);
+    Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
+    Set<String> principals = new HashSet<String>();
+    List<PrincipalName> entries = keytab.getPrincipals();
+    for (PrincipalName entry : entries) {
+      principals.add(entry.getName().replace("\\", "/"));
     }
+    return principals.toArray(new String[0]);
+  }
 
   /**
    * Get all the unique principals from keytabfile which matches a pattern.

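For reference, reading a keytab with the Kerby API now follows the pattern below (a minimal sketch, assuming an illustrative keytab path; Keytab.loadKeytab and getPrincipals are the calls used in the patch above):

    import java.io.File;
    import java.io.IOException;
    import java.util.List;

    import org.apache.kerby.kerberos.kerb.keytab.Keytab;
    import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

    public class ListKeytabPrincipals {
      public static void main(String[] args) throws IOException {
        // Keytab.loadKeytab replaces ApacheDS's Keytab.read.
        Keytab keytab = Keytab.loadKeytab(new File("/tmp/test.keytab"));
        // Kerby groups entries by principal instead of exposing a flat entry list.
        List<PrincipalName> principals = keytab.getPrincipals();
        for (PrincipalName principal : principals) {
          System.out.println(principal.getName());
        }
      }
    }
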
+ 0 - 1
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java

@@ -18,7 +18,6 @@ import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;

+ 14 - 12
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java

@@ -25,11 +25,12 @@ import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;
 
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -96,14 +97,15 @@ public class TestKerberosUtil {
         KerberosUtil.getServicePrincipal(
             service, testHost.toLowerCase(Locale.US)));
   }
-  
+
   @Test
   public void testGetPrincipalNamesMissingKeytab() {
     try {
       KerberosUtil.getPrincipalNames(testKeytab);
       Assert.fail("Exception should have been thrown");
-    } catch (IOException e) {
+    } catch (IllegalArgumentException e) {
       //expects exception
+    } catch (IOException e) {
     }
   }
 
@@ -166,14 +168,14 @@ public class TestKerberosUtil {
       // duplicate principals
       for (int kvno=1; kvno <= 3; kvno++) {
         EncryptionKey key = new EncryptionKey(
-            EncryptionType.UNKNOWN, "samplekey1".getBytes(), kvno);
+            EncryptionType.NONE, "samplekey1".getBytes(), kvno);
         KeytabEntry keytabEntry = new KeytabEntry(
-            principal, 1 , new KerberosTime(), (byte) 1, key);
+            new PrincipalName(principal), new KerberosTime(), (byte) 1, key);
         lstEntries.add(keytabEntry);      
       }
     }
-    Keytab keytab = Keytab.getInstance();
-    keytab.setEntries(lstEntries);
-    keytab.write(new File(testKeytab));
+    Keytab keytab = new Keytab();
+    keytab.addKeytabEntries(lstEntries);
+    keytab.store(new File(testKeytab));
   }
 }

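Writing a keytab programmatically with Kerby mirrors the test helper above (a minimal sketch; the principal, key bytes, and output path are illustrative):

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.kerby.kerberos.kerb.keytab.Keytab;
    import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
    import org.apache.kerby.kerberos.kerb.type.KerberosTime;
    import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
    import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
    import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;

    public class WriteKeytab {
      public static void main(String[] args) throws IOException {
        List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
        EncryptionKey key = new EncryptionKey(
            EncryptionType.NONE, "samplekey".getBytes(), 1);
        // Kerby's KeytabEntry takes a PrincipalName rather than a raw String.
        entries.add(new KeytabEntry(
            new PrincipalName("foo@EXAMPLE.COM"), new KerberosTime(),
            (byte) 1, key));
        Keytab keytab = new Keytab();
        keytab.addKeytabEntries(entries);
        keytab.store(new File("/tmp/test.keytab"));
      }
    }
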
+ 5 - 0
hadoop-common-project/hadoop-common/pom.xml

@@ -295,6 +295,11 @@
       <artifactId>bcprov-jdk16</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
+    </dependency>
   </dependencies>
 
   <build>

+ 22 - 12
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java

@@ -19,9 +19,6 @@
 package org.apache.hadoop.security;
 
 import org.apache.commons.io.IOUtils;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -33,6 +30,10 @@ import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -544,16 +545,25 @@ public class KDiag extends Configured implements Tool, Closeable {
     title("Examining keytab %s", keytabFile);
     File kt = keytabFile.getCanonicalFile();
     verifyFileIsValid(kt, CAT_KERBEROS, "keytab");
-    List<KeytabEntry> entries = Keytab.read(kt).getEntries();
-    println("keytab entry count: %d", entries.size());
-    for (KeytabEntry entry : entries) {
-      EncryptionKey key = entry.getKey();
-      println(" %s: version=%d expires=%s encryption=%s",
-          entry.getPrincipalName(),
-          entry.getKeyVersion(),
-          entry.getTimeStamp(),
-          key.getKeyType());
+
+    Keytab loadKeytab = Keytab.loadKeytab(kt);
+    List<PrincipalName> principals = loadKeytab.getPrincipals();
+    println("keytab princial count: %d", principals.size());
+    int entrySize = 0;
+    for (PrincipalName princ : principals) {
+      List<KeytabEntry> entries = loadKeytab.getKeytabEntries(princ);
+      entrySize = entrySize + entries.size();
+      for (KeytabEntry entry : entries) {
+        EncryptionKey key = entry.getKey();
+        println(" %s: version=%d expires=%s encryption=%s",
+                entry.getPrincipal(),
+                entry.getKvno(),
+                entry.getTimestamp(),
+                key.getKeyType());
+      }
     }
+    println("keytab entry count: %d", entrySize);
+
     endln();
   }
 

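Since KDiag is a standard Hadoop Tool, the keytab examination above can also be driven from code (a hedged sketch; the keytab path and principal are illustrative, and the --keytab/--principal flags are assumed from KDiag's usage text):

    import org.apache.hadoop.security.KDiag;
    import org.apache.hadoop.util.ToolRunner;

    public class RunKDiag {
      public static void main(String[] args) throws Exception {
        // Exit code 0 means all diagnostics, including the keytab dump, passed.
        int ret = ToolRunner.run(new KDiag(), new String[] {
            "--keytab", "/etc/security/keytabs/test.keytab",  // illustrative
            "--principal", "hdfs/localhost@EXAMPLE.COM"       // illustrative
        });
        System.exit(ret);
      }
    }
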
+ 12 - 11
hadoop-common-project/hadoop-common/src/test/resources/krb5.conf

@@ -17,20 +17,21 @@
 #
 
 [libdefaults]
-	default_realm = EXAMPLE.COM
-	allow_weak_crypto = true
-	default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
-	default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+   default_realm = EXAMPLE.COM
+   allow_weak_crypto = true
+   kdc_realm = _REALM_
+   udp_preference_limit = _UDP_LIMIT_
+   #_KDC_TCP_PORT_
+   #_KDC_UDP_PORT_
 
 [realms]
-        EXAMPLE.COM = {
-                kdc = localhost:60088
-        }
+        _REALM_ = {
+                kdc = localhost:_KDC_PORT_
+       }
 
 [domain_realm]
-        .example.com = EXAMPLE.COM
-        example.com = EXAMPLE.COM
+        .example.com = _REALM_
+        example.com = _REALM_
 [login]
         krb4_convert = true
-        krb4_get_tickets = false
-
+        krb4_get_tickets = false

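The _REALM_, _KDC_PORT_, and _UDP_LIMIT_ tokens are placeholders that the test harness substitutes before pointing java.security.krb5.conf at the file. For example, assuming realm EXAMPLE.COM and a TCP-only KDC on port 60088, the rendered [realms] section would read:

    [realms]
            EXAMPLE.COM = {
                    kdc = localhost:60088
            }
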
+ 3 - 4
hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java

@@ -1572,7 +1572,6 @@ public class TestKMS {
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
         conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
         final URI uri = createKMSUri(getKMSUrl());
 
         doAs("client", new PrivilegedExceptionAction<Void>() {
@@ -1698,7 +1697,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());
         final Credentials credentials = new Credentials();
         final UserGroupInformation nonKerberosUgi =
@@ -1882,7 +1881,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());
 
         UserGroupInformation proxyUgi = null;
@@ -1987,7 +1986,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         final URI uri = createKMSUri(getKMSUrl());
 
         UserGroupInformation proxyUgi = null;

+ 3 - 104
hadoop-common-project/hadoop-minikdc/pom.xml

@@ -36,110 +36,9 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-core-api</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-interceptor-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-shared</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-kerberos</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-ldif-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-mavibot-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.api</groupId>
-      <artifactId>api-all</artifactId>
-      <version>1.0.0-M20</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>xml-apis</groupId>
-          <artifactId>xml-apis</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>xpp3</groupId>
-          <artifactId>xpp3</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>dom4j</groupId>
-          <artifactId>dom4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-jdbm-partition</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.directory.server</groupId>
-      <artifactId>apacheds-protocol-ldap</artifactId>
-      <version>2.0.0-M15</version>
-      <scope>compile</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.directory.api</groupId>
-          <artifactId>api-ldap-schema-data</artifactId>
-        </exclusion>
-      </exclusions>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-simplekdc</artifactId>
+      <version>1.0.0-RC2</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>

+ 92 - 289
hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java

@@ -18,65 +18,25 @@
 
 package org.apache.hadoop.minikdc;
 import org.apache.commons.io.Charsets;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.text.StrSubstitutor;
-import org.apache.directory.api.ldap.model.schema.SchemaManager;
-import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
-import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
-import org.apache.directory.server.constants.ServerDNConstants;
-import org.apache.directory.server.core.DefaultDirectoryService;
-import org.apache.directory.server.core.api.CacheService;
-import org.apache.directory.server.core.api.DirectoryService;
-import org.apache.directory.server.core.api.InstanceLayout;
-import org.apache.directory.server.core.api.schema.SchemaPartition;
-import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
-import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
-import org.apache.directory.server.core.partition.ldif.LdifPartition;
-import org.apache.directory.server.kerberos.KerberosConfig;
-import org.apache.directory.server.kerberos.kdc.KdcServer;
-import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
-import org.apache.directory.server.protocol.shared.transport.AbstractTransport;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.directory.server.protocol.shared.transport.UdpTransport;
-import org.apache.directory.server.xdbm.Index;
-import org.apache.directory.shared.kerberos.KerberosTime;
-import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
-import org.apache.directory.shared.kerberos.components.EncryptionKey;
-import org.apache.directory.api.ldap.model.entry.DefaultEntry;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.ldif.LdifEntry;
-import org.apache.directory.api.ldap.model.ldif.LdifReader;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
+import org.apache.kerby.util.IOUtil;
+import org.apache.kerby.util.NetworkUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
-import java.io.StringReader;
-import java.lang.reflect.Method;
-import java.net.InetSocketAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
-import java.util.UUID;
 
 /**
  * Mini KDC based on Apache Directory Server that can be embedded in testcases
@@ -84,9 +44,8 @@ import java.util.UUID;
  * <p>
  * <b>From within testcases:</b>
  * <p>
- * MiniKdc sets 2 System properties when started and un-sets them when stopped:
+ * MiniKdc sets one System property when started and un-sets it when stopped:
  * <ul>
- *   <li>java.security.krb5.conf: set to the MiniKDC real/host/port</li>
  *   <li>sun.security.krb5.debug: set to the debug value provided in the
  *   configuration</li>
  * </ul>
@@ -116,7 +75,7 @@ public class MiniKdc {
   public static final String SUN_SECURITY_KRB5_DEBUG =
       "sun.security.krb5.debug";
 
-  public static void main(String[] args) throws  Exception {
+  public static void main(String[] args) throws Exception {
     if (args.length < 4) {
       System.out.println("Arguments: <WORKDIR> <MINIKDCPROPERTIES> " +
               "<KEYTABFILE> [<PRINCIPALS>]+");
@@ -229,13 +188,17 @@ public class MiniKdc {
   }
 
   private Properties conf;
-  private DirectoryService ds;
-  private KdcServer kdc;
+  private SimpleKdcServer simpleKdc;
   private int port;
   private String realm;
   private File workDir;
   private File krb5conf;
+  private String transport;
+  private boolean krb5Debug;
 
+  public void setTransport(String transport) {
+    this.transport = transport;
+  }
   /**
    * Creates a MiniKdc.
    *
@@ -253,9 +216,9 @@ public class MiniKdc {
               + missingProperties);
     }
     this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
-    if (! workDir.exists()
-            && ! workDir.mkdirs()) {
-      throw new RuntimeException("Cannot create directory " + workDir);
+    if (!this.workDir.exists()
+            && !this.workDir.mkdirs()) {
+      throw new RuntimeException("Cannot create directory " + this.workDir);
     }
     LOG.info("Configuration:");
     LOG.info("---------------------------------------------------------------");
@@ -299,6 +262,7 @@ public class MiniKdc {
   }
 
   public File getKrb5conf() {
+    krb5conf = new File(System.getProperty(JAVA_SECURITY_KRB5_CONF));
     return krb5conf;
   }
 
@@ -308,226 +272,81 @@ public class MiniKdc {
    * @throws Exception thrown if the MiniKdc could not be started.
    */
   public synchronized void start() throws Exception {
-    if (kdc != null) {
+    if (simpleKdc != null) {
       throw new RuntimeException("Already started");
     }
-    initDirectoryService();
-    initKDCServer();
+    simpleKdc = new SimpleKdcServer();
+    prepareKdcServer();
+    simpleKdc.init();
+    resetDefaultRealm();
+    simpleKdc.start();
+    LOG.info("MiniKdc stated.");
   }
 
-  private void initDirectoryService() throws Exception {
-    ds = new DefaultDirectoryService();
-    ds.setInstanceLayout(new InstanceLayout(workDir));
-
-    CacheService cacheService = new CacheService();
-    ds.setCacheService(cacheService);
-
-    // first load the schema
-    InstanceLayout instanceLayout = ds.getInstanceLayout();
-    File schemaPartitionDirectory = new File(
-            instanceLayout.getPartitionsDirectory(), "schema");
-    SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor(
-            instanceLayout.getPartitionsDirectory());
-    extractor.extractOrCopy();
-
-    SchemaLoader loader = new LdifSchemaLoader(schemaPartitionDirectory);
-    SchemaManager schemaManager = new DefaultSchemaManager(loader);
-    schemaManager.loadAllEnabled();
-    ds.setSchemaManager(schemaManager);
-    // Init the LdifPartition with schema
-    LdifPartition schemaLdifPartition = new LdifPartition(schemaManager);
-    schemaLdifPartition.setPartitionPath(schemaPartitionDirectory.toURI());
-
-    // The schema partition
-    SchemaPartition schemaPartition = new SchemaPartition(schemaManager);
-    schemaPartition.setWrappedPartition(schemaLdifPartition);
-    ds.setSchemaPartition(schemaPartition);
-
-    JdbmPartition systemPartition = new JdbmPartition(ds.getSchemaManager());
-    systemPartition.setId("system");
-    systemPartition.setPartitionPath(new File(
-            ds.getInstanceLayout().getPartitionsDirectory(),
-            systemPartition.getId()).toURI());
-    systemPartition.setSuffixDn(new Dn(ServerDNConstants.SYSTEM_DN));
-    systemPartition.setSchemaManager(ds.getSchemaManager());
-    ds.setSystemPartition(systemPartition);
-
-    ds.getChangeLog().setEnabled(false);
-    ds.setDenormalizeOpAttrsEnabled(true);
-    ds.addLast(new KeyDerivationInterceptor());
-
-    // create one partition
-    String orgName= conf.getProperty(ORG_NAME).toLowerCase(Locale.ENGLISH);
-    String orgDomain = conf.getProperty(ORG_DOMAIN).toLowerCase(Locale.ENGLISH);
-
-    JdbmPartition partition = new JdbmPartition(ds.getSchemaManager());
-    partition.setId(orgName);
-    partition.setPartitionPath(new File(
-            ds.getInstanceLayout().getPartitionsDirectory(), orgName).toURI());
-    partition.setSuffixDn(new Dn("dc=" + orgName + ",dc=" + orgDomain));
-    ds.addPartition(partition);
-    // indexes
-    Set<Index<?, ?, String>> indexedAttributes = new HashSet<Index<?, ?, String>>();
-    indexedAttributes.add(new JdbmIndex<String, Entry>("objectClass", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("dc", false));
-    indexedAttributes.add(new JdbmIndex<String, Entry>("ou", false));
-    partition.setIndexedAttributes(indexedAttributes);
-
-    // And start the ds
-    ds.setInstanceId(conf.getProperty(INSTANCE));
-    ds.startup();
-    // context entry, after ds.startup()
-    Dn dn = new Dn("dc=" + orgName + ",dc=" + orgDomain);
-    Entry entry = ds.newEntry(dn);
-    entry.add("objectClass", "top", "domain");
-    entry.add("dc", orgName);
-    ds.getAdminSession().add(entry);
+  private void resetDefaultRealm() throws IOException {
+    InputStream templateResource = new FileInputStream(
+            getKrb5conf().getAbsolutePath());
+    String content = IOUtil.readInput(templateResource);
+    content = content.replaceAll("default_realm = .*\n",
+            "default_realm = " + getRealm() + "\n");
+    IOUtil.writeFile(content, getKrb5conf());
   }
 
-  /**
-   * Convenience method that returns a resource as inputstream from the
-   * classpath.
-   * <p>
-   * It first attempts to use the Thread's context classloader and if not
-   * set it uses the class' classloader.
-   *
-   * @param resourceName resource to retrieve.
-   *
-   * @throws IOException thrown if resource cannot be loaded
-   * @return inputstream with the resource.
-   */
-  public static InputStream getResourceAsStream(String resourceName)
-      throws IOException {
-    ClassLoader cl = Thread.currentThread().getContextClassLoader();
-    if (cl == null) {
-      cl = MiniKdc.class.getClassLoader();
-    }
-    InputStream is = cl.getResourceAsStream(resourceName);
-    if (is == null) {
-      throw new IOException("Can not read resource file '" +
-          resourceName + "'");
-    }
-    return is;
-  }
-
-  private void initKDCServer() throws Exception {
-    String orgName= conf.getProperty(ORG_NAME);
-    String orgDomain = conf.getProperty(ORG_DOMAIN);
-    String bindAddress = conf.getProperty(KDC_BIND_ADDRESS);
-    final Map<String, String> map = new HashMap<String, String>();
-    map.put("0", orgName.toLowerCase(Locale.ENGLISH));
-    map.put("1", orgDomain.toLowerCase(Locale.ENGLISH));
-    map.put("2", orgName.toUpperCase(Locale.ENGLISH));
-    map.put("3", orgDomain.toUpperCase(Locale.ENGLISH));
-    map.put("4", bindAddress);
-
-    InputStream is1 = getResourceAsStream("minikdc.ldiff");
-
-    SchemaManager schemaManager = ds.getSchemaManager();
-    LdifReader reader = null;
-
-    try {
-      final String content = StrSubstitutor.replace(IOUtils.toString(is1), map);
-      reader = new LdifReader(new StringReader(content));
-
-      for (LdifEntry ldifEntry : reader) {
-        ds.getAdminSession().add(new DefaultEntry(schemaManager,
-                ldifEntry.getEntry()));
-      }
-    } finally {
-      IOUtils.closeQuietly(reader);
-      IOUtils.closeQuietly(is1);
-    }
-
-    KerberosConfig kerberosConfig = new KerberosConfig();
-    kerberosConfig.setMaximumRenewableLifetime(Long.parseLong(conf
-        .getProperty(MAX_RENEWABLE_LIFETIME)));
-    kerberosConfig.setMaximumTicketLifetime(Long.parseLong(conf
-        .getProperty(MAX_TICKET_LIFETIME)));
-    kerberosConfig.setSearchBaseDn(String.format("dc=%s,dc=%s", orgName,
-        orgDomain));
-    kerberosConfig.setPaEncTimestampRequired(false);
-    kdc = new KdcServer(kerberosConfig);
-    kdc.setDirectoryService(ds);
-
+  private void prepareKdcServer() throws Exception {
     // transport
-    String transport = conf.getProperty(TRANSPORT);
-    AbstractTransport absTransport;
-    if (transport.trim().equals("TCP")) {
-      absTransport = new TcpTransport(bindAddress, port, 3, 50);
-    } else if (transport.trim().equals("UDP")) {
-      absTransport = new UdpTransport(port);
-    } else {
-      throw new IllegalArgumentException("Invalid transport: " + transport);
+    simpleKdc.setWorkDir(workDir);
+    simpleKdc.setKdcHost(getHost());
+    simpleKdc.setKdcRealm(realm);
+    if (transport == null) {
+      transport = conf.getProperty(TRANSPORT);
     }
-    kdc.addTransports(absTransport);
-    kdc.setServiceName(conf.getProperty(INSTANCE));
-    kdc.start();
-    // if using ephemeral port, update port number for binding
     if (port == 0) {
-      InetSocketAddress addr =
-          (InetSocketAddress)absTransport.getAcceptor().getLocalAddress();
-      port = addr.getPort();
+      port = NetworkUtil.getServerPort();
     }
-
-    StringBuilder sb = new StringBuilder();
-    InputStream is2 = getResourceAsStream("minikdc-krb5.conf");
-
-    BufferedReader r = null;
-
-    try {
-      r = new BufferedReader(new InputStreamReader(is2, Charsets.UTF_8));
-      String line = r.readLine();
-
-      while (line != null) {
-        sb.append(line).append("{3}");
-        line = r.readLine();
+    if (transport != null) {
+      if (transport.trim().equals("TCP")) {
+        simpleKdc.setKdcTcpPort(port);
+        simpleKdc.setAllowUdp(false);
+      } else if (transport.trim().equals("UDP")) {
+        simpleKdc.setKdcUdpPort(port);
+        simpleKdc.setAllowTcp(false);
+      } else {
+        throw new IllegalArgumentException("Invalid transport: " + transport);
       }
-    } finally {
-      IOUtils.closeQuietly(r);
-      IOUtils.closeQuietly(is2);
-    }
-
-    krb5conf = new File(workDir, "krb5.conf").getAbsoluteFile();
-    FileUtils.writeStringToFile(krb5conf,
-            MessageFormat.format(sb.toString(), getRealm(), getHost(),
-                    Integer.toString(getPort()), System.getProperty("line.separator")));
-    System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5conf.getAbsolutePath());
-
-    System.setProperty(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG,
-            "false"));
-
-    // refresh the config
-    Class<?> classRef;
-    if (System.getProperty("java.vendor").contains("IBM")) {
-      classRef = Class.forName("com.ibm.security.krb5.internal.Config");
     } else {
-      classRef = Class.forName("sun.security.krb5.Config");
+      throw new IllegalArgumentException("Need to set transport!");
+    }
+    simpleKdc.getKdcConfig().setString(KdcConfigKey.KDC_SERVICE_NAME,
+            conf.getProperty(INSTANCE));
+    if (conf.getProperty(DEBUG) != null) {
+      krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG, conf.getProperty(DEBUG));
     }
-    Method refreshMethod = classRef.getMethod("refresh", new Class[0]);
-    refreshMethod.invoke(classRef, new Object[0]);
-
-    LOG.info("MiniKdc listening at port: {}", getPort());
-    LOG.info("MiniKdc setting JVM krb5.conf to: {}",
-            krb5conf.getAbsolutePath());
   }
 
   /**
    * Stops the MiniKdc
    */
   public synchronized void stop() {
-    if (kdc != null) {
-      System.getProperties().remove(JAVA_SECURITY_KRB5_CONF);
-      System.getProperties().remove(SUN_SECURITY_KRB5_DEBUG);
-      kdc.stop();
+    if (simpleKdc != null) {
       try {
-        ds.shutdown();
-      } catch (Exception ex) {
-        LOG.error("Could not shutdown ApacheDS properly: {}", ex.toString(),
-                ex);
+        simpleKdc.stop();
+      } catch (KrbException e) {
+        e.printStackTrace();
+      } finally {
+        if (conf.getProperty(DEBUG) != null) {
+          System.setProperty(SUN_SECURITY_KRB5_DEBUG,
+                  Boolean.toString(krb5Debug));
+        }
       }
     }
     delete(workDir);
+    try {
+      // Will be fixed in next Kerby version.
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+    LOG.info("MiniKdc stopped.");
   }
 
   private void delete(File f) {
@@ -554,55 +373,39 @@ public class MiniKdc {
    */
   public synchronized void createPrincipal(String principal, String password)
           throws Exception {
-    String orgName= conf.getProperty(ORG_NAME);
-    String orgDomain = conf.getProperty(ORG_DOMAIN);
-    String baseDn = "ou=users,dc=" + orgName.toLowerCase(Locale.ENGLISH)
-                    + ",dc=" + orgDomain.toLowerCase(Locale.ENGLISH);
-    String content = "dn: uid=" + principal + "," + baseDn + "\n" +
-            "objectClass: top\n" +
-            "objectClass: person\n" +
-            "objectClass: inetOrgPerson\n" +
-            "objectClass: krb5principal\n" +
-            "objectClass: krb5kdcentry\n" +
-            "cn: " + principal + "\n" +
-            "sn: " + principal + "\n" +
-            "uid: " + principal + "\n" +
-            "userPassword: " + password + "\n" +
-            "krb5PrincipalName: " + principal + "@" + getRealm() + "\n" +
-            "krb5KeyVersionNumber: 0";
-
-    for (LdifEntry ldifEntry : new LdifReader(new StringReader(content))) {
-      ds.getAdminSession().add(new DefaultEntry(ds.getSchemaManager(),
-              ldifEntry.getEntry()));
-    }
+    simpleKdc.createPrincipal(principal, password);
   }
 
   /**
-   * Creates  multiple principals in the KDC and adds them to a keytab file.
+   * Creates multiple principals in the KDC and adds them to a keytab file.
    *
-   * @param keytabFile keytab file to add the created principal.s
+   * @param keytabFile keytab file to add the created principals.
    * @param principals principals to add to the KDC, do not include the domain.
    * @throws Exception thrown if the principals or the keytab file could not be
    * created.
    */
-  public void createPrincipal(File keytabFile, String ... principals)
+  public synchronized void createPrincipal(File keytabFile,
+                                           String ... principals)
           throws Exception {
-    String generatedPassword = UUID.randomUUID().toString();
-    Keytab keytab = new Keytab();
-    List<KeytabEntry> entries = new ArrayList<KeytabEntry>();
+    simpleKdc.createPrincipals(principals);
+    if (keytabFile.exists() && !keytabFile.delete()) {
+      LOG.error("Failed to delete keytab file: " + keytabFile);
+    }
     for (String principal : principals) {
-      createPrincipal(principal, generatedPassword);
-      principal = principal + "@" + getRealm();
-      KerberosTime timestamp = new KerberosTime();
-      for (Map.Entry<EncryptionType, EncryptionKey> entry : KerberosKeyFactory
-              .getKerberosKeys(principal, generatedPassword).entrySet()) {
-        EncryptionKey ekey = entry.getValue();
-        byte keyVersion = (byte) ekey.getKeyVersion();
-        entries.add(new KeytabEntry(principal, 1L, timestamp, keyVersion,
-                ekey));
-      }
+      simpleKdc.getKadmin().exportKeytab(keytabFile, principal);
     }
-    keytab.setEntries(entries);
-    keytab.write(keytabFile);
+  }
+
+  /**
+   * Set the System property; return the old value for caching.
+   *
+   * @param sysprop property
+   * @param debug true or false
+   * @return the previous value
+   */
+  private boolean getAndSet(String sysprop, String debug) {
+    boolean old = Boolean.getBoolean(sysprop);
+    System.setProperty(sysprop, debug);
+    return old;
   }
 }

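From a test's point of view the MiniKdc surface is unchanged by the Kerby rewrite; a typical lifecycle still looks like the sketch below (work directory, keytab path, and principal names are illustrative):

    import java.io.File;
    import java.util.Properties;

    import org.apache.hadoop.minikdc.MiniKdc;

    public class MiniKdcExample {
      public static void main(String[] args) throws Exception {
        Properties conf = MiniKdc.createConf();  // default test configuration
        File workDir = new File("/tmp/minikdc-work");
        MiniKdc kdc = new MiniKdc(conf, workDir);
        kdc.start();  // now backed by Kerby's SimpleKdcServer
        try {
          // Creates the principals and exports their keys to a keytab in one call.
          kdc.createPrincipal(new File(workDir, "test.keytab"),
              "foo/bar", "bar/foo");
          System.out.println("KDC realm: " + kdc.getRealm()
              + ", port: " + kdc.getPort());
        } finally {
          kdc.stop();
        }
      }
    }
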
+ 0 - 25
hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf

@@ -1,25 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-[libdefaults]
-    default_realm = {0}
-    udp_preference_limit = 1
-
-[realms]
-    {0} = '{'
-        kdc = {1}:{2}
-    '}'

+ 0 - 47
hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff

@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-dn: ou=users,dc=${0},dc=${1}
-objectClass: organizationalUnit
-objectClass: top
-ou: users
-
-dn: uid=krbtgt,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: KDC Service
-sn: Service
-uid: krbtgt
-userPassword: secret
-krb5PrincipalName: krbtgt/${2}.${3}@${2}.${3}
-krb5KeyVersionNumber: 0
-
-dn: uid=ldap,ou=users,dc=${0},dc=${1}
-objectClass: top
-objectClass: person
-objectClass: inetOrgPerson
-objectClass: krb5principal
-objectClass: krb5kdcentry
-cn: LDAP
-sn: Service
-uid: ldap
-userPassword: secret
-krb5PrincipalName: ldap/${4}@${2}.${3}
-krb5KeyVersionNumber: 0

+ 10 - 9
hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.minikdc;
 
-import org.apache.directory.server.kerberos.shared.keytab.Keytab;
-import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry;
+import org.apache.kerby.kerberos.kerb.keytab.Keytab;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -30,6 +30,7 @@ import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import java.io.File;
 import java.security.Principal;
+import java.util.List;
 import java.util.Set;
 import java.util.Map;
 import java.util.HashSet;
@@ -51,16 +52,16 @@ public class TestMiniKdc extends KerberosSecurityTestcase {
     File workDir = getWorkDir();
 
     kdc.createPrincipal(new File(workDir, "keytab"), "foo/bar", "bar/foo");
-    Keytab kt = Keytab.read(new File(workDir, "keytab"));
+    List<PrincipalName> principalNameList =
+            Keytab.loadKeytab(new File(workDir, "keytab")).getPrincipals();
+
     Set<String> principals = new HashSet<String>();
-    for (KeytabEntry entry : kt.getEntries()) {
-      principals.add(entry.getPrincipalName());
+    for (PrincipalName principalName : principalNameList) {
+      principals.add(principalName.getName());
     }
-    //here principals use \ instead of /
-    //because org.apache.directory.server.kerberos.shared.keytab.KeytabDecoder
-    // .getPrincipalName(IoBuffer buffer) use \\ when generates principal
+
     Assert.assertEquals(new HashSet<String>(Arrays.asList(
-            "foo\\bar@" + kdc.getRealm(), "bar\\foo@" + kdc.getRealm())),
+            "foo/bar@" + kdc.getRealm(), "bar/foo@" + kdc.getRealm())),
             principals);
   }
 

+ 10 - 9
hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf

@@ -17,21 +17,22 @@
 #
 
 [libdefaults]
-	default_realm = EXAMPLE.COM
-	allow_weak_crypto = true
-	default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
-	default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+   default_realm = EXAMPLE.COM
+   allow_weak_crypto = true
+   kdc_realm = _REALM_
+   udp_preference_limit = _UDP_LIMIT_
+   #_KDC_TCP_PORT_
+   #_KDC_UDP_PORT_
 
 [realms]
-        EXAMPLE.COM = {
-                kdc = localhost:60088
+        _REALM_ = {
+                kdc = localhost:_KDC_PORT_
         }
 
 [domain_realm]
-        .example.com = EXAMPLE.COM
-        example.com = EXAMPLE.COM
+        .example.com = _REALM_
+        example.com = _REALM_
 
 [login]
         krb4_convert = true
         krb4_get_tickets = false
-

+ 0 - 6
hadoop-project/pom.xml

@@ -953,12 +953,6 @@
         <version>1.8</version>
       </dependency>
 
-      <dependency>
-        <groupId>org.apache.directory.server</groupId>
-        <artifactId>apacheds-kerberos-codec</artifactId>
-        <version>2.0.0-M15</version>
-      </dependency>
-
       <dependency>
         <groupId>com.microsoft.azure</groupId>
         <artifactId>azure-storage</artifactId>

+ 14 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/resources/krb5.conf

@@ -14,15 +14,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# 
+#
+
 [libdefaults]
-	default_realm = APACHE.ORG
-	udp_preference_limit = 1
-	extra_addresses = 127.0.0.1
+   default_realm = APACHE.ORG
+   extra_addresses = 127.0.0.1
+   kdc_realm = _REALM_
+   udp_preference_limit = _UDP_LIMIT_
+   #_KDC_TCP_PORT_
+   #_KDC_UDP_PORT_
+
 [realms]
-	APACHE.ORG = {
-		admin_server = localhost:88
-		kdc = localhost:88
-	}
+   _REALM_ = {
+       admin_server = localhost:_KDC_PORT_
+       kdc = localhost:_KDC_PORT_
+   }
 [domain_realm]
-	localhost = APACHE.ORG
+   localhost = _REALM_