
HDDS-102. SCM CA: SCM CA server signs certificate for approved CSR. Contributed by Anu Engineer.

Xiaoyu Yao, 6 years ago
Parent
Current commit
924bea9730
20 files changed, with 1794 additions and 63 deletions
  1. 6 0
      hadoop-hdds/common/pom.xml
  2. 11 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
  3. 18 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java
  4. 249 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/BaseApprover.java
  5. 86 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateApprover.java
  6. 24 16
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateServer.java
  7. 128 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultApprover.java
  8. 82 18
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java
  9. 46 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultCAProfile.java
  10. 333 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultProfile.java
  11. 140 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/PKIProfile.java
  12. 33 0
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/package-info.java
  13. 39 9
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java
  14. 38 9
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java
  15. 76 2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java
  16. 56 0
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java
  17. 51 0
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
  18. 361 0
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java
  19. 0 6
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
  20. 17 2
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java

+ 6 - 0
hadoop-hdds/common/pom.xml

@@ -117,6 +117,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <artifactId>bcpkix-jdk15on</artifactId>
       <version>1.54</version>
     </dependency>
+    <!-- https://mvnrepository.com/artifact/commons-validator/commons-validator -->
+    <dependency>
+      <groupId>commons-validator</groupId>
+      <artifactId>commons-validator</artifactId>
+      <version>1.6</version>
+    </dependency>
   </dependencies>
 
   <build>

+ 11 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java

@@ -55,7 +55,7 @@ public final class HddsConfigKeys {
   // Configuration to allow volume choosing policy.
   public static final String HDDS_DATANODE_VOLUME_CHOOSING_POLICY =
       "hdds.datanode.volume.choosing.policy";
-  // DB Profiles used by ROCKDB instances.
+  // DB Profile used by RocksDB instances.
   public static final String HDDS_DB_PROFILE = "hdds.db.profile";
   public static final DBProfile HDDS_DEFAULT_DB_PROFILE = DBProfile.DISK;
   // Once a container usage crosses this threshold, it is eligible for
@@ -135,6 +135,16 @@ public final class HddsConfigKeys {
   public static final String HDDS_X509_FILE_NAME = "hdds.x509.file.name";
   public static final String HDDS_X509_FILE_NAME_DEFAULT = "certificate.crt";
 
+  /**
+   * Default duration of certificates issued by SCM CA.
+   * The formats accepted are based on the ISO-8601 duration format
+   * PnDTnHnMn.nS; for example, five years would be written as P1825D.
+   * The default value is one year, written as P365D.
+   */
+  public static final String HDDS_X509_DEFAULT_DURATION = "hdds.x509.default" +
+      ".duration";
+  // Default certificate duration is one year.
+  public static final String HDDS_X509_DEFAULT_DURATION_DEFAULT = "P365D";
+
   /**
    * Do not instantiate.
    */
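
For reference, values for hdds.x509.default.duration parse with java.time.Duration; a minimal sketch (the class name and the five-year value below are illustrative, not part of the patch):

import java.time.Duration;

public class CertDurationSketch {
  public static void main(String[] args) {
    // P365D is the shipped default: one year.
    Duration oneYear = Duration.parse("P365D");
    // Five years would be written as P1825D.
    Duration fiveYears = Duration.parse("P1825D");
    System.out.println(oneYear.toDays() + " / " + fiveYears.toDays());
  }
}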

+ 18 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/SecurityConfig.java

@@ -63,6 +63,8 @@ import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PRIVATE_KEY_FILE_NAME_D
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_PUBLIC_KEY_FILE_NAME_DEFAULT;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_SECURITY_PROVIDER;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_DEFAULT_DURATION;
+import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_DEFAULT_DURATION_DEFAULT;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_DIR_NAME;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_DIR_NAME_DEFAULT;
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_X509_FILE_NAME;
@@ -104,6 +106,7 @@ public class SecurityConfig {
   private String trustStoreFileName;
   private String serverCertChainFileName;
   private String clientCertChainFileName;
+  private final Duration defaultCertDuration;
   private final boolean isSecurityEnabled;
 
   /**
@@ -172,6 +175,12 @@ public class SecurityConfig {
         OZONE_SECURITY_ENABLED_KEY,
         OZONE_SECURITY_ENABLED_DEFAULT);
 
+    String certDurationString =
+        this.configuration.get(HDDS_X509_DEFAULT_DURATION,
+            HDDS_X509_DEFAULT_DURATION_DEFAULT);
+    defaultCertDuration = Duration.parse(certDurationString);
+
     // First Startup -- if the provider is null, check for the provider.
     if (SecurityConfig.provider == null) {
       synchronized (SecurityConfig.class) {
@@ -195,6 +204,15 @@ public class SecurityConfig {
     return isSecurityEnabled;
   }
 
+  /**
+   * Returns the Default Certificate Duration.
+   *
+   * @return Duration for the default certificate issue.
+   */
+  public Duration getDefaultCertDuration() {
+    return defaultCertDuration;
+  }
+
   /**
    * Returns the Standard Certificate file name.
    *

+ 249 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/BaseApprover.java

@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
+import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
+import org.bouncycastle.asn1.ASN1Encodable;
+import org.bouncycastle.asn1.ASN1ObjectIdentifier;
+import org.bouncycastle.asn1.pkcs.Attribute;
+import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
+import org.bouncycastle.asn1.x500.RDN;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.Extensions;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.operator.ContentVerifierProvider;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.jcajce.JcaContentVerifierProviderBuilder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+import org.bouncycastle.pkcs.PKCSException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * A base approver class for certificate approvals.
+ */
+public abstract class BaseApprover implements CertificateApprover {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BaseApprover.class);
+  private final PKIProfile profile;
+  private final SecurityConfig securityConfig;
+
+  public BaseApprover(PKIProfile pkiProfile, SecurityConfig config) {
+    this.profile = Objects.requireNonNull(pkiProfile);
+    this.securityConfig = Objects.requireNonNull(config);
+  }
+
+  /**
+   * Returns the Security config.
+   *
+   * @return SecurityConfig
+   */
+  public SecurityConfig getSecurityConfig() {
+    return securityConfig;
+  }
+
+  /**
+   * Returns the Attribute array that encodes extensions.
+   *
+   * @param request - Certificate Request
+   * @return - An Array of Attributes that encode various extensions requested
+   * in this certificate.
+   */
+  Attribute[] getAttributes(PKCS10CertificationRequest request) {
+    Objects.requireNonNull(request);
+    return
+        request.getAttributes(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest);
+  }
+
+  /**
+   * Returns a list of Extensions encoded in a given attribute.
+   *
+   * @param attribute - Attribute to decode.
+   * @return - List of Extensions.
+   */
+  List<Extensions> getExtensionsList(Attribute attribute) {
+    Objects.requireNonNull(attribute);
+    List<Extensions> extensionsList = new ArrayList<>();
+    for (ASN1Encodable value : attribute.getAttributeValues()) {
+      if (value != null) {
+        Extensions extensions = Extensions.getInstance(value);
+        extensionsList.add(extensions);
+      }
+    }
+    return extensionsList;
+  }
+
+  /**
+   * Returns the Extension decoded into a Java Collection.
+   * @param extensions - A set of Extensions in ASN.1.
+   * @return List of Decoded Extensions.
+   */
+  List<Extension> getIndividualExtension(Extensions extensions) {
+    Objects.requireNonNull(extensions);
+    List<Extension> extenList = new ArrayList<>();
+    for (ASN1ObjectIdentifier id : extensions.getExtensionOIDs()) {
+      if (id != null) {
+        Extension ext = extensions.getExtension(id);
+        if (ext != null) {
+          extenList.add(ext);
+        }
+      }
+    }
+    return extenList;
+  }
+
+
+  /**
+   * This function verifies all extensions in the certificate request.
+   *
+   * @param request - CSR
+   * @return - true if the extensions are acceptable to the profile, false
+   * otherwise.
+   */
+  boolean verifyExtensions(PKCS10CertificationRequest request) {
+    Objects.requireNonNull(request);
+    /*
+     * Inside a CSR we have
+     *  1. A list of Attributes
+     *    2. Inside each attribute, a list of extensions.
+     *      3. We need to walk through each extension and verify that it is
+     *      expected and that we can put it into a certificate.
+     */
+
+    for (Attribute attr : getAttributes(request)) {
+      for (Extensions extensionsList : getExtensionsList(attr)) {
+        for (Extension extension : getIndividualExtension(extensionsList)) {
+          if (!profile.validateExtension(extension)) {
+            LOG.error("Failed to verify extension. {}",
+                extension.getExtnId().getId());
+            return false;
+          }
+        }
+      }
+    }
+    return true;
+  }
+
+  /**
+   * Verifies the Signature on the CSR is valid.
+   *
+   * @param pkcs10Request - PKCS10 Request.
+   * @return True if it is valid, false otherwise.
+   * @throws OperatorCreationException - On Error.
+   * @throws PKCSException             - on Error.
+   */
+  boolean verifyPkcs10Request(PKCS10CertificationRequest pkcs10Request)
+      throws OperatorCreationException, PKCSException {
+    ContentVerifierProvider verifierProvider = new
+        JcaContentVerifierProviderBuilder()
+        .setProvider(this.securityConfig.getProvider())
+        .build(pkcs10Request.getSubjectPublicKeyInfo());
+    return
+        pkcs10Request.isSignatureValid(verifierProvider);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public CompletableFuture<X509CertificateHolder> approve(String csr)
+      throws IOException {
+    return approve(CertificateSignRequest.getCertificationRequest(csr));
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public CompletableFuture<X509CertificateHolder>
+      approve(PKCS10CertificationRequest csr) {
+    /*
+     * The base approver executes the following algorithm to verify that a
+     * CSR meets the PKI Profile criteria.
+     *
+     * 0. For the time being (until we have SCM HA) we will deny all requests
+     * to become an intermediary CA, so we do not need to verify against the
+     * CA profile right now.
+     *
+     * 1. We verify the proof of possession. That is, we verify that the
+     * entity that sent us the CSR indeed has the private key for the said
+     * public key.
+     *
+     * 2. Then we verify that the RDNs meet the format and the syntax that
+     * the PKI profile dictates.
+     *
+     * 3. Then we decode each and every extension and ask if the PKI profile
+     * approves of these extension requests.
+     *
+     * 4. If all of these pass, we return a Future which will point to the
+     * Certificate when finished.
+     */
+
+    CompletableFuture<X509CertificateHolder> response =
+        new CompletableFuture<>();
+    try {
+      // Step 0: Verify this is not a CA Certificate.
+      // Will be done by the Ozone PKI profile for the time being.
+      // If there are any basicConstraints, they will be flagged as not
+      // supported for the time being.
+
+      // Step 1: Let us verify that Certificate is indeed signed by someone
+      // who has access to the private key.
+      if (!verifyPkcs10Request(csr)) {
+        LOG.error("Failed to verify the signature in CSR.");
+        response.completeExceptionally(new SCMSecurityException("Failed to " +
+            "verify the CSR."));
+      }
+
+      // Step 2: Verify the RDNs are in the correct format.
+      // TODO: Ozone Profile does not verify RDN now, so this call will pass.
+      for (RDN rdn : csr.getSubject().getRDNs()) {
+        if (!profile.validateRDN(rdn)) {
+          LOG.error("Failed in verifying RDNs");
+          response.completeExceptionally(new SCMSecurityException("Failed to " +
+              "verify the RDNs. Please check the subject name."));
+        }
+      }
+
+      // Step 3: Verify the Extensions.
+      if (!verifyExtensions(csr)) {
+        LOG.error("Failed in verification of extensions.");
+        response.completeExceptionally(new SCMSecurityException("Failed to " +
+            "verify extensions."));
+      }
+
+    } catch (OperatorCreationException | PKCSException e) {
+      LOG.error("Approval Failure.", e);
+      response.completeExceptionally(new SCMSecurityException(e));
+    }
+    return response;
+  }
+}
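
To make the attribute -> extension-set -> extension nesting that BaseApprover verifies concrete, here is a small standalone sketch (assuming BouncyCastle on the classpath; the CSR is assumed to come from a builder such as CertificateSignRequest):

import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.pkcs.Attribute;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;

public final class CsrExtensionDump {
  private CsrExtensionDump() {
  }

  // Walks attributes -> extension sets -> individual extensions, the same
  // three-level nesting that verifyExtensions() checks against the profile.
  public static void dump(PKCS10CertificationRequest csr) {
    for (Attribute attr : csr.getAttributes(
        PKCSObjectIdentifiers.pkcs_9_at_extensionRequest)) {
      for (ASN1Encodable value : attr.getAttributeValues()) {
        Extensions extensions = Extensions.getInstance(value);
        for (ASN1ObjectIdentifier oid : extensions.getExtensionOIDs()) {
          Extension ext = extensions.getExtension(oid);
          System.out.println(oid.getId() + " critical=" + ext.isCritical());
        }
      }
    }
  }
}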

+ 86 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateApprover.java

@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+
+import java.io.IOException;
+import java.security.PrivateKey;
+import java.util.Date;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * Certificate Approver interface is used to approve a certificate.
+ */
+public interface CertificateApprover {
+  /**
+   * Approves a Certificate Request based on the policies of this approver.
+   *
+   * @param csr - Certificate Signing Request.
+   * @return - Future that will contain the certificate or an exception.
+   */
+  CompletableFuture<X509CertificateHolder>
+      approve(PKCS10CertificationRequest csr);
+
+  /**
+   * Approves a Certificate Request based on the policies of this approver.
+   *
+   * @param csr - Certificate Signing Request.
+   * @return - Future that will contain the certificate or an exception.
+   * @throws IOException - On Error.
+   */
+  CompletableFuture<X509CertificateHolder>
+      approve(String csr) throws IOException;
+
+  /**
+   * Sign function signs a Certificate.
+   * @param config - Security Config.
+   * @param caPrivate - CAs private Key.
+   * @param caCertificate - CA Certificate.
+   * @param validFrom - Begin Date
+   * @param validTill - End Date
+   * @param certificationRequest - Certification Request.
+   * @return Signed Certificate.
+   * @throws IOException - On Error
+   * @throws OperatorCreationException - on Error.
+   */
+  X509CertificateHolder sign(
+      SecurityConfig config,
+      PrivateKey caPrivate,
+      X509CertificateHolder caCertificate,
+      Date validFrom,
+      Date validTill,
+      PKCS10CertificationRequest certificationRequest)
+      throws IOException, OperatorCreationException;
+
+
+  /**
+   * Approval Types for a certificate request.
+   */
+  enum ApprovalType {
+    KERBEROS_TRUSTED, /* The Request came from a DN using Kerberos Identity */
+    MANUAL, /* Wait for a Human being to approve this certificate */
+    TESTING_AUTOMATIC /* For testing purpose, Automatic Approval. */
+  }
+
+}

+ 24 - 16
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateServer.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.hdds.security.x509.certificate.authority;
 
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
-import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
 import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
 
 import java.io.IOException;
 import java.security.cert.CertificateException;
@@ -51,7 +51,7 @@ public interface CertificateServer {
    * @return X509CertificateHolder - Certificate for this CA.
    * @throws CertificateException - usually thrown if this CA is not
    *                              initialized.
-   * @throws IOException - on Error.
+   * @throws IOException          - on Error.
    */
   X509CertificateHolder getCACertificate()
       throws CertificateException, IOException;
@@ -59,15 +59,31 @@ public interface CertificateServer {
   /**
    * Request a Certificate based on Certificate Signing Request.
    *
-   * @param csr - Certificate Signing Request.
-   * @param approver - An Enum which says what kind of approval process to
-   * follow.
+   * @param csr  - Certificate Signing Request.
+   * @param type - An Enum which says what kind of approval process to follow.
    * @return A future that will have this certificate when this request is
    * approved.
    * @throws SCMSecurityException - on Error.
    */
-  Future<X509CertificateHolder> requestCertificate(CertificateSignRequest csr,
-      CertificateApprover approver) throws SCMSecurityException;
+  Future<X509CertificateHolder>
+      requestCertificate(PKCS10CertificationRequest csr,
+      CertificateApprover.ApprovalType type)
+      throws SCMSecurityException;
+
+
+  /**
+   * Request a Certificate based on Certificate Signing Request.
+   *
+   * @param csr - Certificate Signing Request as a PEM encoded String.
+   * @param type - An Enum which says what kind of approval process to follow.
+   * @return A future that will have this certificate when this request is
+   * approved.
+   * @throws IOException - on Error.
+   */
+  Future<X509CertificateHolder>
+      requestCertificate(String csr, CertificateApprover.ApprovalType type)
+      throws IOException;
+
 
   /**
    * Revokes a Certificate issued by this CertificateServer.
@@ -78,21 +94,13 @@ public interface CertificateServer {
    * @throws SCMSecurityException - on Error.
    */
   Future<Boolean> revokeCertificate(X509Certificate certificate,
-      CertificateApprover approver) throws SCMSecurityException;
+      CertificateApprover.ApprovalType approver) throws SCMSecurityException;
 
   /**
    * TODO : CRL, OCSP etc. Later. This is the start of a CertificateServer
    * framework.
    */
 
-  /**
-   * Approval Types for a certificate request.
-   */
-  enum CertificateApprover {
-    KERBEROS_TRUSTED, /* The Request came from a DN using Kerberos Identity*/
-    MANUAL, /* Wait for a Human being to approve this certificate */
-    TESTING_AUTOMATIC /* For testing purpose, Automatic Approval. */
-  }
 
   /**
    * Make it explicit what type of CertificateServer we are creating here.
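
A hedged sketch of how a caller might drive this interface; the server instance and the PEM string are assumptions, and TESTING_AUTOMATIC is the automatic approval type defined above:

import java.util.concurrent.Future;
import org.bouncycastle.cert.X509CertificateHolder;

public final class RequestCertificateSketch {
  private RequestCertificateSketch() {
  }

  // 'server' and 'pemEncodedCsr' are assumed to be supplied by the caller.
  public static X509CertificateHolder issue(CertificateServer server,
      String pemEncodedCsr) throws Exception {
    Future<X509CertificateHolder> pending = server.requestCertificate(
        pemEncodedCsr, CertificateApprover.ApprovalType.TESTING_AUTOMATIC);
    // Blocks until the approver and the signer have finished (or failed).
    return pending.get();
  }
}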

+ 128 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultApprover.java

@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.commons.lang3.RandomUtils;
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
+import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.cert.X509v3CertificateBuilder;
+import org.bouncycastle.crypto.params.AsymmetricKeyParameter;
+import org.bouncycastle.crypto.params.RSAKeyParameters;
+import org.bouncycastle.crypto.util.PrivateKeyFactory;
+import org.bouncycastle.crypto.util.PublicKeyFactory;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.DefaultDigestAlgorithmIdentifierFinder;
+import org.bouncycastle.operator.DefaultSignatureAlgorithmIdentifierFinder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.bc.BcRSAContentSignerBuilder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.PrivateKey;
+import java.util.Date;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * Default Approver used the by the DefaultCA.
+ */
+public class DefaultApprover extends BaseApprover {
+
+  /**
+   * Constructs the Default Approver.
+   *
+   * @param pkiProfile - PKI Profile to use.
+   * @param config - Security Config
+   */
+  public DefaultApprover(PKIProfile pkiProfile, SecurityConfig config) {
+    super(pkiProfile, config);
+  }
+
+  /**
+   * Sign function signs a Certificate.
+   * @param config - Security Config.
+   * @param caPrivate - CAs private Key.
+   * @param caCertificate - CA Certificate.
+   * @param validFrom - Begin Date
+   * @param validTill - End Date
+   * @param certificationRequest - Certification Request.
+   * @return Signed Certificate.
+   * @throws IOException - On Error
+   * @throws OperatorCreationException - on Error.
+   */
+  public X509CertificateHolder sign(
+      SecurityConfig config,
+      PrivateKey caPrivate,
+      X509CertificateHolder caCertificate,
+      Date validFrom,
+      Date validTill,
+      PKCS10CertificationRequest certificationRequest)
+      throws IOException, OperatorCreationException {
+
+    AlgorithmIdentifier sigAlgId = new
+        DefaultSignatureAlgorithmIdentifierFinder().find(
+        config.getSignatureAlgo());
+    AlgorithmIdentifier digAlgId = new DefaultDigestAlgorithmIdentifierFinder()
+        .find(sigAlgId);
+
+    AsymmetricKeyParameter asymmetricKP = PrivateKeyFactory.createKey(caPrivate
+        .getEncoded());
+    SubjectPublicKeyInfo keyInfo =
+        certificationRequest.getSubjectPublicKeyInfo();
+
+    RSAKeyParameters rsa =
+        (RSAKeyParameters) PublicKeyFactory.createKey(keyInfo);
+    if (rsa.getModulus().bitLength() < config.getSize()) {
+      throw new SCMSecurityException("Key size is too small in certificate " +
+          "signing request");
+    }
+    X509v3CertificateBuilder certificateGenerator =
+        new X509v3CertificateBuilder(
+            caCertificate.getSubject(),
+            // Serial numbers must be positive per RFC 5280, hence signum 1.
+            // When we do persistence we will check if the certificate number
+            // is a duplicate.
+            new BigInteger(1, RandomUtils.nextBytes(8)),
+            validFrom,
+            validTill,
+            certificationRequest.getSubject(), keyInfo);
+
+    ContentSigner sigGen = new BcRSAContentSignerBuilder(sigAlgId, digAlgId)
+        .build(asymmetricKP);
+
+    return certificateGenerator.build(sigGen);
+  }
+
+  @Override
+  public CompletableFuture<X509CertificateHolder> approve(String csr)
+      throws IOException {
+    return super.approve(csr);
+  }
+
+  @Override
+  public CompletableFuture<X509CertificateHolder>
+      approve(PKCS10CertificationRequest csr) {
+    return super.approve(csr);
+  }
+}
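
A hedged sketch of exercising sign() directly and sanity-checking the result against the CA public key; the CA key pair, CA certificate holder, and approved CSR are all assumed to exist already (for example from HDDSKeyGenerator and SelfSignedCertificate), and the class lives in the same package so SecurityConfig and DefaultProfile are in scope:

import java.security.KeyPair;
import java.sql.Date;
import java.time.LocalDate;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.operator.jcajce.JcaContentVerifierProviderBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;

public final class SignSketch {
  private SignSketch() {
  }

  public static X509CertificateHolder signAndCheck(SecurityConfig config,
      KeyPair caKeys, X509CertificateHolder caCert,
      PKCS10CertificationRequest csr) throws Exception {
    DefaultApprover approver =
        new DefaultApprover(new DefaultProfile(), config);
    LocalDate now = LocalDate.now();
    X509CertificateHolder signed = approver.sign(config,
        caKeys.getPrivate(), caCert,
        Date.valueOf(now), Date.valueOf(now.plusDays(365)), csr);
    // Sanity check: the issued certificate must verify against the CA key.
    boolean ok = signed.isSignatureValid(
        new JcaContentVerifierProviderBuilder()
            .setProvider(config.getProvider())
            .build(caKeys.getPublic()));
    if (!ok) {
      throw new IllegalStateException("CA signature did not verify");
    }
    return signed;
  }
}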

+ 82 - 18
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java

@@ -23,12 +23,15 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
 import org.apache.hadoop.hdds.security.x509.certificates.utils.SelfSignedCertificate;
 import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
 import org.apache.hadoop.hdds.security.x509.keys.KeyCodec;
 import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,12 +44,16 @@ import java.security.NoSuchAlgorithmException;
 import java.security.NoSuchProviderException;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
+import java.security.spec.InvalidKeySpecException;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
 import java.time.LocalTime;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.Future;
 import java.util.function.Consumer;
 
+import static org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest.getCertificationRequest;
+
 /**
  * The default CertificateServer used by SCM. This has no dependencies on any
  * external system, this allows us to bootstrap a CertificateServer from
@@ -103,6 +110,11 @@ public class DefaultCAServer implements CertificateServer {
   private Path caKeysPath;
   private Path caRootX509Path;
   private SecurityConfig config;
+  /**
+   * TODO: We will make these configurable in the future.
+   */
+  private PKIProfile profile;
+  private CertificateApprover approver;
 
   /**
    * Create an Instance of DefaultCAServer.
@@ -124,6 +136,11 @@ public class DefaultCAServer implements CertificateServer {
     caRootX509Path = securityConfig.getCertificateLocation(componentName);
     this.config = securityConfig;
 
+    // TODO: Make these configurable and load different profiles based on
+    // config.
+    profile = new DefaultProfile();
+    this.approver = new DefaultApprover(profile, this.config);
+
     /* In future we will split this code to have different kinds of CAs.
      * Right now, we have only self-signed CertificateServer.
      */
@@ -141,23 +158,76 @@ public class DefaultCAServer implements CertificateServer {
   }
 
   @Override
-  public X509CertificateHolder getCACertificate() throws
-      CertificateException, IOException {
+  public X509CertificateHolder getCACertificate() throws IOException {
     CertificateCodec certificateCodec =
         new CertificateCodec(config, componentName);
-    return certificateCodec.readCertificate();
+    try {
+      return certificateCodec.readCertificate();
+    } catch (CertificateException e) {
+      throw new IOException(e);
+    }
+  }
+
+  private KeyPair getCAKeys() throws IOException {
+    KeyCodec keyCodec = new KeyCodec(config, componentName);
+    try {
+      return new KeyPair(keyCodec.readPublicKey(), keyCodec.readPrivateKey());
+    } catch (InvalidKeySpecException | NoSuchAlgorithmException e) {
+      throw new IOException(e);
+    }
   }
 
   @Override
   public Future<X509CertificateHolder> requestCertificate(
-      CertificateSignRequest csr, CertificateApprover approver)
-      throws SCMSecurityException {
-    return null;
+      PKCS10CertificationRequest csr,
+      CertificateApprover.ApprovalType approverType) {
+    LocalDate beginDate = LocalDate.now();
+    LocalDateTime temp = LocalDateTime.of(beginDate, LocalTime.MIDNIGHT);
+    LocalDate endDate =
+        temp.plus(config.getDefaultCertDuration()).toLocalDate();
+
+    CompletableFuture<X509CertificateHolder> xcertHolder =
+        approver.approve(csr);
+
+    if (xcertHolder.isCompletedExceptionally()) {
+      // This means that approver told us there are things which it disagrees
+      // with in this Certificate Request. Since the first set of sanity
+      // checks failed, we just return the future object right here.
+      return xcertHolder;
+    }
+    try {
+      switch (approverType) {
+      case MANUAL:
+        xcertHolder.completeExceptionally(new SCMSecurityException("Manual " +
+            "approval is not yet implemented."));
+        break;
+      case KERBEROS_TRUSTED:
+      case TESTING_AUTOMATIC:
+        X509CertificateHolder xcert = approver.sign(config,
+            getCAKeys().getPrivate(),
+            getCACertificate(), java.sql.Date.valueOf(beginDate),
+            java.sql.Date.valueOf(endDate), csr);
+        xcertHolder.complete(xcert);
+        break;
+      default:
+        return null; // cannot happen, keeping checkstyle happy.
+      }
+    } catch (IOException | OperatorCreationException e) {
+      xcertHolder.completeExceptionally(new SCMSecurityException(e));
+    }
+    return xcertHolder;
+  }
+
+  @Override
+  public Future<X509CertificateHolder> requestCertificate(String csr,
+      CertificateApprover.ApprovalType type) throws IOException {
+    PKCS10CertificationRequest request =
+        getCertificationRequest(csr);
+    return requestCertificate(request, type);
   }
 
   @Override
   public Future<Boolean> revokeCertificate(X509Certificate certificate,
-      CertificateApprover approver) throws SCMSecurityException {
+      CertificateApprover.ApprovalType approverType)
+      throws SCMSecurityException {
     return null;
   }
 
@@ -227,11 +297,8 @@ public class DefaultCAServer implements CertificateServer {
       return false;
     }
 
-    if (!Files.exists(Paths.get(caKeysPath.toString(),
-        this.config.getPrivateKeyFileName()))) {
-      return false;
-    }
-    return true;
+    return Files.exists(Paths.get(caKeysPath.toString(),
+        this.config.getPrivateKeyFileName()));
   }
 
   /**
@@ -243,11 +310,8 @@ public class DefaultCAServer implements CertificateServer {
     if (!Files.exists(caRootX509Path)) {
       return false;
     }
-    if (!Files.exists(Paths.get(caRootX509Path.toString(),
-        this.config.getCertificateFileName()))) {
-      return false;
-    }
-    return true;
+    return Files.exists(Paths.get(caRootX509Path.toString(),
+        this.config.getCertificateFileName()));
   }
 
   /**

+ 46 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultCAProfile.java

@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
+
+import org.bouncycastle.asn1.x509.Extension;
+
+import java.util.function.BiFunction;
+
+import static java.lang.Boolean.TRUE;
+
+/**
+ * CA profile; this is needed when SCM runs in HA mode.
+ * A placeholder class indicating what we need to do when we support issuing
+ * CA certificates to other SCMs in HA mode.
+ */
+public class DefaultCAProfile extends DefaultProfile {
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_BASIC_CONSTRAINTS = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_CRL_NUMBER = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_REASON_CODE = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_DELTA_CRL_INDICATOR = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_NAME_CONSTRAINTS = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_CRL_DISTRIBUTION_POINTS = (e, b) -> TRUE;
+}

+ 333 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/DefaultProfile.java

@@ -0,0 +1,333 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.validator.routines.DomainValidator;
+import org.bouncycastle.asn1.ASN1ObjectIdentifier;
+import org.bouncycastle.asn1.x500.RDN;
+import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.GeneralName;
+import org.bouncycastle.asn1.x509.GeneralNames;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.bind.DatatypeConverter;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.BiFunction;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.lang.Boolean.TRUE;
+import static org.bouncycastle.asn1.x509.KeyPurposeId.id_kp_clientAuth;
+import static org.bouncycastle.asn1.x509.KeyPurposeId.id_kp_serverAuth;
+
+/**
+ * Ozone PKI profile.
+ * <p>
+ * This PKI profile is invoked by the SCM CA to make sure that certificates
+ * issued by the SCM CA are properly constrained.
+ */
+public class DefaultProfile implements PKIProfile {
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_KEY_USAGE = DefaultProfile::validateKeyUsage;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_AUTHORITY_KEY_IDENTIFIER = (e, b) -> TRUE;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_LOGO_TYPE = (e, b) -> TRUE;
+  private static final Logger LOG =
+      LoggerFactory.getLogger(DefaultProfile.class);
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_SAN = DefaultProfile::validateSubjectAlternativeName;
+  static final BiFunction<Extension, PKIProfile, Boolean>
+      VALIDATE_EXTENDED_KEY_USAGE = DefaultProfile::validateExtendedKeyUsage;
+  // If we decide to add more General Names, we should add those here and
+  // also update the logic in validateGeneralName function.
+  private static final int[] GENERAL_NAMES = {
+      GeneralName.dNSName,
+      GeneralName.iPAddress,
+  };
+  // Map that handles all the Extensions lookup and validations.
+  private static final Map<ASN1ObjectIdentifier, BiFunction<Extension,
+      PKIProfile, Boolean>> EXTENSIONS_MAP = Stream.of(
+      new SimpleEntry<>(Extension.keyUsage, VALIDATE_KEY_USAGE),
+      new SimpleEntry<>(Extension.subjectAlternativeName, VALIDATE_SAN),
+      new SimpleEntry<>(Extension.authorityKeyIdentifier,
+          VALIDATE_AUTHORITY_KEY_IDENTIFIER),
+      new SimpleEntry<>(Extension.extendedKeyUsage,
+          VALIDATE_EXTENDED_KEY_USAGE),
+      // Ozone certs are issued only for the use of Ozone.
+      // However, some users will discover that this is a full scale CA
+      // and decide to mis-use these certs for other purposes.
+      // To discourage usage of these certs for other purposes, we can leave
+      // the Ozone Logo inside these certs. So if a browser is used to
+      // connect these logos will show up.
+      // https://www.ietf.org/rfc/rfc3709.txt
+      new SimpleEntry<>(Extension.logoType, VALIDATE_LOGO_TYPE))
+      .collect(Collectors.toMap(SimpleEntry::getKey,
+          SimpleEntry::getValue));
+  // If we decide to support more extended key usages, we should add those
+  // here and also update the logic in the validateExtendedKeyUsage function.
+  private static final KeyPurposeId[] EXTENDED_KEY_USAGE = {
+      id_kp_serverAuth, // TLS Web server authentication
+      id_kp_clientAuth, // TLS Web client authentication
+  };
+  private final Set<KeyPurposeId> extendKeyPurposeSet;
+  private Set<Integer> generalNameSet;
+
+  /**
+   * Construct DefaultProfile.
+   */
+  public DefaultProfile() {
+    generalNameSet = new HashSet<>();
+    for (int val : GENERAL_NAMES) {
+      generalNameSet.add(val);
+    }
+    extendKeyPurposeSet =
+        new HashSet<>(Arrays.asList(EXTENDED_KEY_USAGE));
+  }
+
+  /**
+   * This function validates that the KeyUsage Bits are subset of the Bits
+   * permitted by the ozone profile.
+   *
+   * @param ext - KeyUsage Extension.
+   * @param profile - PKI Profile - In this case this profile.
+   * @return True, if the request key usage is a subset, false otherwise.
+   */
+  private static Boolean validateKeyUsage(Extension ext, PKIProfile profile) {
+    KeyUsage keyUsage = profile.getKeyUsage();
+    KeyUsage requestedUsage = KeyUsage.getInstance(ext.getParsedValue());
+    BitSet profileBitSet = BitSet.valueOf(keyUsage.getBytes());
+    BitSet requestBitSet = BitSet.valueOf(requestedUsage.getBytes());
+    // Check if the requestBitSet is a subset of profileBitSet
+    // p & r == r holds exactly when r is a subset of p.
+    profileBitSet.and(requestBitSet);
+    return profileBitSet.equals(requestBitSet);
+  }
+
+  /**
+   * Validates the SubjectAlternative names in the Certificate.
+   *
+   * @param ext - Extension - SAN, which allows us to get the SAN names.
+   * @param profile - This profile.
+   * @return - True if the request contains only SANs, General names that we
+   * support. False otherwise.
+   */
+  private static Boolean validateSubjectAlternativeName(Extension ext,
+      PKIProfile profile) {
+    if (ext.isCritical()) {
+      // SAN extensions should not be marked as critical under the ozone
+      // profile.
+      LOG.error("SAN extension marked as critical in the Extension. {}",
+          GeneralNames.getInstance(ext.getParsedValue()).toString());
+      return false;
+    }
+    GeneralNames generalNames = GeneralNames.getInstance(ext.getParsedValue());
+    for (GeneralName name : generalNames.getNames()) {
+      try {
+        if (!profile.validateGeneralName(name.getTagNo(),
+            name.getName().toString())) {
+          return false;
+        }
+      } catch (UnknownHostException e) {
+        LOG.error("IP address validation failed."
+            + name.getName().toString(), e);
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * This function validates that the Extended Key Usage purposes are a
+   * subset of the purposes permitted by the ozone profile.
+   *
+   * @param ext - ExtendedKeyUsage Extension.
+   * @param profile - PKI Profile - In this case this profile.
+   * @return True, if the requested usages are a subset, false otherwise.
+   */
+  private static Boolean validateExtendedKeyUsage(Extension ext,
+      PKIProfile profile) {
+    if (ext.isCritical()) {
+      // https://tools.ietf.org/html/rfc5280#section-4.2.1.12
+      // Ozone profile opts to mark this extension as non-critical.
+      LOG.error("Extended Key usage marked as critical.");
+      return false;
+    }
+    ExtendedKeyUsage extendedKeyUsage =
+        ExtendedKeyUsage.getInstance(ext.getParsedValue());
+    for (KeyPurposeId id : extendedKeyUsage.getUsages()) {
+      if (!profile.validateExtendedKeyUsage(id)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public int[] getGeneralNames() {
+    return Arrays.copyOfRange(GENERAL_NAMES, 0, GENERAL_NAMES.length);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean isSupportedGeneralName(int generalName) {
+    return generalNameSet.contains(generalName);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean validateGeneralName(int type, String value) {
+    // TODO : We should add more validation for IP address, for example
+    //  it matches the local network, and domain matches where the cluster
+    //  exits.
+    if (!isSupportedGeneralName(type)) {
+      return false;
+    }
+    switch (type) {
+    case GeneralName.iPAddress:
+
+      // We need the DatatypeConverter conversion, since the original CSR
+      // encodes an IP address into a hex string; for example, 8.8.8.8 is
+      // encoded as #08080808. The value string is always preceded by "#",
+      // which we strip before passing it on.
+
+      // The getByAddress call converts the IP address to hostname/ipAddress
+      // format; if the hostname cannot be determined, it will be /ipAddress.
+
+      // TODO: Fail if we cannot resolve the Hostname?
+      try {
+        final InetAddress byAddress = InetAddress.getByAddress(
+            DatatypeConverter.parseHexBinary(value.substring(1)));
+        LOG.debug("Host Name/IP Address : {}", byAddress.toString());
+        return true;
+      } catch (UnknownHostException e) {
+        return false;
+      }
+    case GeneralName.dNSName:
+      return DomainValidator.getInstance().isValid(value);
+    default:
+      // This should not happen, since it is guarded by isSupportedGeneralName.
+      LOG.error("Unexpected type in General Name (int value): {}", type);
+      return false;
+    }
+  }
+
+  @Override
+  public boolean validateExtendedKeyUsage(KeyPurposeId id) {
+    return extendKeyPurposeSet.contains(id);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public ASN1ObjectIdentifier[] getSupportedExtensions() {
+    return EXTENSIONS_MAP.keySet().toArray(new ASN1ObjectIdentifier[0]);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean isSupportedExtension(Extension extension) {
+    return EXTENSIONS_MAP.containsKey(extension.getExtnId());
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean validateExtension(Extension extension) {
+    Preconditions.checkNotNull(extension, "Extension cannot be null");
+
+    if (!isSupportedExtension(extension)) {
+      LOG.error("Unsupported Extension found: {} ",
+          extension.getExtnId().getId());
+      return false;
+    }
+
+    BiFunction<Extension, PKIProfile, Boolean> func =
+        EXTENSIONS_MAP.get(extension.getExtnId());
+
+    if (func != null) {
+      return func.apply(extension, this);
+    }
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public KeyUsage getKeyUsage() {
+    return new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyEncipherment
+        | KeyUsage.dataEncipherment | KeyUsage.keyAgreement);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public RDN[] getRDNs() {
+    return new RDN[0];
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean isValidRDN(RDN distinguishedName) {
+    // TODO: Right now we just approve all strings.
+    return true;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean validateRDN(RDN name) {
+    return true;
+  }
+
+  @Override
+  public boolean isCA() {
+    return false;
+  }
+}
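
The BitSet trick in validateKeyUsage can be checked in isolation; a minimal sketch (the class and method names are illustrative):

import java.util.BitSet;
import org.bouncycastle.asn1.x509.KeyUsage;

public final class KeyUsageSubsetSketch {
  private KeyUsageSubsetSketch() {
  }

  // Returns true when every requested bit is also permitted:
  // permitted & requested == requested exactly when requested is a subset.
  public static boolean isSubset(KeyUsage permitted, KeyUsage requested) {
    BitSet p = BitSet.valueOf(permitted.getBytes());
    BitSet r = BitSet.valueOf(requested.getBytes());
    BitSet and = (BitSet) p.clone();
    and.and(r);
    return and.equals(r);
  }

  public static void main(String[] args) {
    KeyUsage permitted = new KeyUsage(KeyUsage.digitalSignature
        | KeyUsage.keyEncipherment | KeyUsage.dataEncipherment
        | KeyUsage.keyAgreement);
    System.out.println(
        isSubset(permitted, new KeyUsage(KeyUsage.digitalSignature))); // true
    System.out.println(
        isSubset(permitted, new KeyUsage(KeyUsage.keyCertSign)));      // false
  }
}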

+ 140 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/PKIProfile.java

@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;
+
+import org.bouncycastle.asn1.ASN1ObjectIdentifier;
+import org.bouncycastle.asn1.x500.RDN;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+
+import java.net.UnknownHostException;
+
+/**
+ * Base class for profile rules. Generally profiles are documents that define
+ * the PKI policy. In HDDS/Ozone world, we have chosen to make PKIs
+ * executable code. So if an end-user wants to use a custom profile or one of
+ * the existing profile like the list below, they are free to implement a
+ * custom profile.
+ *
+ *     PKIX - Internet PKI profile.
+ *     FPKI - (US) Federal PKI profile.
+ *     MISSI - US DoD profile.
+ *     ISO 15782 - Banking - Certificate Management Part 1: Public Key
+ *         Certificates.
+ *     TeleTrust/MailTrusT - German MailTrusT profile for TeleTrusT (it
+ *         really is capitalised that way).
+ *     German SigG Profile - Profile to implement the German digital
+ *         signature law.
+ *     ISIS Profile - Another German profile.
+ *     Australian Profile - Profile for the Australian PKAF
+ *     SS 61 43 31 Electronic ID Certificate - Swedish profile.
+ *     FINEID S3 - Finnish profile.
+ *     ANX Profile - Automotive Network Exchange profile.
+ *     Microsoft Profile - This isn't a real profile, but windows uses this.
+ */
+public interface PKIProfile {
+
+  /**
+   * Returns the list of General Names supported by this profile.
+   * @return - an Array of supported General Names by this certificate profile.
+   */
+  int[] getGeneralNames();
+
+  /**
+   * Checks if a given General Name is permitted in this profile.
+   * @param generalName - General name.
+   * @return true if it is allowed, false otherwise.
+   */
+  boolean isSupportedGeneralName(int generalName);
+
+  /**
+   * Allows the profile to dictate what value ranges are valid.
+   * @param type - Type of the General Name.
+   * @param value - Value of the General Name.
+   * @return - true if the value is permitted, false otherwise.
+   * @throws UnknownHostException - on Error in IP validation.
+   */
+  boolean validateGeneralName(int type, String value)
+      throws UnknownHostException;
+
+  /**
+   * Returns an array of Object identifiers for extensions supported by this
+   * profile.
+   * @return an Array of ASN1ObjectIdentifier for the supported extensions.
+   */
+  ASN1ObjectIdentifier[] getSupportedExtensions();
+
+  /**
+   * Checks if this extension is permitted in this profile.
+   * @param extension - Extension to check for.
+   * @return - true if this extension is supported, false otherwise.
+   */
+  boolean isSupportedExtension(Extension extension);
+
+  /**
+   * Checks if the extension has the value which this profile approves.
+   * @param extension - Extension to validate.
+   * @return - True if the extension is acceptable, false otherwise.
+   */
+  boolean validateExtension(Extension extension);
+
+  /**
+   * Validate the Extended Key Usage.
+   * @param id - KeyPurpose ID
+   * @return true, if this is a supported Purpose, false otherwise.
+   */
+  boolean validateExtendedKeyUsage(KeyPurposeId id);
+
+  /**
+   * Returns the permitted Key usage mask while using this profile.
+   * @return KeyUsage
+   */
+  KeyUsage getKeyUsage();
+
+  /**
+   * Gets the supported list of RDNs supported by this profile.
+   * @return Array of RDNs.
+   */
+  RDN[] getRDNs();
+
+  /**
+   * Returns true if this Relative Distinguished Name component is allowed in
+   * this profile.
+   * @param distinguishedName - RDN to check.
+   * @return boolean, True if this RDN is allowed, false otherwise.
+   */
+  boolean isValidRDN(RDN distinguishedName);
+
+  /**
+   * Allows the profile to control the value set of the RDN. Profile can
+   * reject a RDN name if needed.
+   * @param name - RDN.
+   * @return true if the name is acceptable to this profile, false otherwise.
+   */
+  boolean validateRDN(RDN name);
+
+  /**
+   * True if the profile we are checking is for issuing a CA certificate.
+   * @return  True, if the profile used is for CA, false otherwise.
+   */
+  boolean isCA();
+}
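
Since profiles are executable code, a custom profile can be as small as one overridden rule. A hypothetical sketch that tightens DNS name validation while inheriting everything else from DefaultProfile (the corporate domain is an assumption, and the class is assumed to live in the PKIProfiles package so DefaultProfile is in scope):

import org.bouncycastle.asn1.x509.GeneralName;

public class StrictDnsProfile extends DefaultProfile {
  // Only accept DNS SANs under an assumed corporate domain; all other
  // general-name handling is inherited from DefaultProfile.
  @Override
  public boolean validateGeneralName(int type, String value) {
    if (type == GeneralName.dNSName && !value.endsWith(".example.com")) {
      return false;
    }
    return super.validateGeneralName(type, value);
  }
}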

+ 33 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/PKIProfiles/package-info.java

@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/**
+ * The PKIProfiles package supports the different kinds of profiles that
+ * certificates can conform to. If you are not familiar with PKI profiles,
+ * there is an excellent introduction at
+ *
+ * https://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt
+ *
+ * At a high level, the profiles in this directory define what kinds of
+ * Extensions, General Names, Key Usage and critical extensions are
+ * permitted when the CA is functional.
+ *
+ * The ozone profile is an excellent example to reference if you would
+ * like to create your own profile.
+ */
+package org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles;

+ 39 - 9
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java

@@ -34,14 +34,19 @@ import org.bouncycastle.asn1.x509.Extensions;
 import org.bouncycastle.asn1.x509.GeneralName;
 import org.bouncycastle.asn1.x509.GeneralNames;
 import org.bouncycastle.asn1.x509.KeyUsage;
+import org.bouncycastle.openssl.jcajce.JcaPEMWriter;
 import org.bouncycastle.operator.ContentSigner;
 import org.bouncycastle.operator.OperatorCreationException;
 import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
 import org.bouncycastle.pkcs.PKCS10CertificationRequest;
 import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;
 import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
+import org.bouncycastle.util.io.pem.PemObject;
+import org.bouncycastle.util.io.pem.PemReader;
 
 import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
 import java.security.KeyPair;
 import java.util.ArrayList;
 import java.util.List;
@@ -70,7 +75,7 @@ public final class CertificateSignRequest {
    * @param extensions - CSR extensions
    */
   private CertificateSignRequest(String subject, String scmID, String clusterID,
       KeyPair keyPair, SecurityConfig config, Extensions extensions) {
     this.subject = subject;
     this.clusterID = clusterID;
     this.scmID = scmID;
@@ -97,6 +102,35 @@ public final class CertificateSignRequest {
     }
     return p10Builder.build(contentSigner);
   }
+
+  public static String getEncodedString(PKCS10CertificationRequest request)
+      throws IOException {
+    PemObject pemObject =
+        new PemObject("CERTIFICATE REQUEST", request.getEncoded());
+    StringWriter str = new StringWriter();
+    try (JcaPEMWriter pemWriter = new JcaPEMWriter(str)) {
+      pemWriter.writeObject(pemObject);
+    }
+    return str.toString();
+  }
+
+  /**
+   * Gets a CertificateRequest Object from PEM encoded CSR.
+   *
+   * @param csr - PEM Encoded Certificate Request String.
+   * @return PKCS10CertificationRequest
+   * @throws IOException - On Error.
+   */
+  public static PKCS10CertificationRequest getCertificationRequest(String csr)
+      throws IOException {
+    try (PemReader reader = new PemReader(new StringReader(csr))) {
+      PemObject pemObject = reader.readPemObject();
+      if (pemObject == null || pemObject.getContent() == null) {
+        throw new SCMSecurityException("Invalid Certificate signing request");
+      }
+      return new PKCS10CertificationRequest(pemObject.getContent());
+    }
+  }
 
   /**
    * Builder class for Certificate Sign Request.
@@ -144,12 +178,6 @@ public final class CertificateSignRequest {
       return this;
     }
 
-    public CertificateSignRequest.Builder addRfc822Name(String name) {
-      Preconditions.checkNotNull(name, "Rfc822Name cannot be null");
-      this.addAltName(GeneralName.rfc822Name, name);
-      return this;
-    }
-
    // The IP address is subject to change, so it is optional for now.
     public CertificateSignRequest.Builder addIpAddress(String ip) {
       Preconditions.checkNotNull(ip, "Ip address cannot be null");
@@ -186,7 +214,7 @@ public final class CertificateSignRequest {
         IOException {
       if (altNames != null) {
         return Optional.of(new Extension(Extension.subjectAlternativeName,
-            true, new DEROctetString(new GeneralNames(
+            false, new DEROctetString(new GeneralNames(
             altNames.toArray(new GeneralName[altNames.size()])))));
       }
       return Optional.empty();
@@ -202,7 +230,9 @@ public final class CertificateSignRequest {
       List<Extension> extensions = new ArrayList<>();
 
       // Add basic extension
-      extensions.add(getBasicExtension());
+      if (ca) {
+        extensions.add(getBasicExtension());
+      }
 
       // Add key usage extension
       extensions.add(getKeyUsageExtension());
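
The PEM round trip added above can be exercised as in this sketch (the host
name, the IDs, and the conf/keyPair variables are illustrative stand-ins for
values the caller already has):

    // Build a CSR, encode it as a PEM string for transport, then decode it
    // back into a PKCS10CertificationRequest on the CA side.
    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
        .addDnsName("datanode-1.example.org")
        .setCA(false)
        .setClusterID("cid-1")
        .setScmID("scm-1")
        .setSubject("Ozone Cluster")
        .setConfiguration(conf)
        .setKey(keyPair)
        .build();
    String pem = CertificateSignRequest.getEncodedString(csr);
    PKCS10CertificationRequest decoded =
        CertificateSignRequest.getCertificationRequest(pem);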

+ 38 - 9
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/KeyCodec.java

@@ -47,6 +47,7 @@ import java.security.PrivateKey;
 import java.security.PublicKey;
 import java.security.spec.InvalidKeySpecException;
 import java.security.spec.PKCS8EncodedKeySpec;
+import java.security.spec.X509EncodedKeySpec;
 import java.util.Set;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -209,10 +210,22 @@ public class KeyCodec {
       throws InvalidKeySpecException, NoSuchAlgorithmException, IOException {
     PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, privateKeyFileName);
     final KeyFactory keyFactory =
-        KeyFactory.getInstance(securityConfig.getProvider());
-    final PrivateKey privateKey =
-        keyFactory.generatePrivate(encodedKeySpec);
-    return privateKey;
+        KeyFactory.getInstance(securityConfig.getKeyAlgo());
+    return keyFactory.generatePrivate(encodedKeySpec);
+  }
+
+  /**
+   * Read the Public Key using defaults.
+   * @return PublicKey.
+   * @throws InvalidKeySpecException - On Error.
+   * @throws NoSuchAlgorithmException - On Error.
+   * @throws IOException - On Error.
+   */
+  public PublicKey readPublicKey() throws InvalidKeySpecException,
+      NoSuchAlgorithmException, IOException {
+    return readPublicKey(this.location.toAbsolutePath(),
+        securityConfig.getPublicKeyFileName());
   }
 
   /**
@@ -229,12 +242,28 @@ public class KeyCodec {
       throws NoSuchAlgorithmException, InvalidKeySpecException, IOException {
     PKCS8EncodedKeySpec encodedKeySpec = readKey(basePath, publicKeyFileName);
     final KeyFactory keyFactory =
-        KeyFactory.getInstance(securityConfig.getProvider());
-    final PublicKey publicKey =
-        keyFactory.generatePublic(encodedKeySpec);
-    return publicKey;
+        KeyFactory.getInstance(securityConfig.getKeyAlgo());
+    return keyFactory.generatePublic(
+        new X509EncodedKeySpec(encodedKeySpec.getEncoded()));
   }
 
+  /**
+   * Returns the private key  using defaults.
+   * @return PrivateKey.
+   * @throws InvalidKeySpecException - On Error.
+   * @throws NoSuchAlgorithmException - On Error.
+   * @throws IOException - On Error.
+   */
+  public PrivateKey readPrivateKey() throws InvalidKeySpecException,
+      NoSuchAlgorithmException, IOException {
+    return readPrivateKey(this.location.toAbsolutePath(),
+        securityConfig.getPrivateKeyFileName());
+  }
+
   /**
    * Helper function that actually writes data to the files.
    *
@@ -246,7 +275,7 @@ public class KeyCodec {
    * @throws IOException - On I/O failure.
    */
   private synchronized void writeKey(Path basePath, KeyPair keyPair,
-      String privateKeyFileName, String publicKeyFileName, boolean force)
+                                     String privateKeyFileName, String publicKeyFileName, boolean force)
       throws IOException {
     checkPreconditions(basePath);
 
@@ -282,7 +311,7 @@ public class KeyCodec {
    * @throws IOException - On I/O failure.
    */
   private void checkKeyFile(File privateKeyFile, boolean force,
-      File publicKeyFile) throws IOException {
+                            File publicKeyFile) throws IOException {
     if (privateKeyFile.exists() && force) {
       if (!privateKeyFile.delete()) {
         throw new IOException("Unable to delete private key file.");
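
The new zero-argument readers pair with writeKey as in this sketch (it
mirrors TestKeyCodec further down; configuration is assumed to be an
OzoneConfiguration with a metadata directory set):

    // Persist a key pair under the default file names, then read both
    // halves back without spelling out paths or file names.
    KeyPair keyPair = new HDDSKeyGenerator(configuration).generateKey();
    KeyCodec codec = new KeyCodec(configuration);
    codec.writeKey(keyPair);
    PublicKey publicKey = codec.readPublicKey();
    PrivateKey privateKey = codec.readPrivateKey();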

+ 76 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java

@@ -19,7 +19,81 @@
 
 
 /**
- * This package contains common routines used in creating an x509 based
- * identity framework for HDDS.
+ * This package contains common routines used in creating an x509 based identity
+ * framework for HDDS.
  */
 package org.apache.hadoop.hdds.security.x509;
+/*
+
+Architecture of Certificate Infrastructure for SCM.
+====================================================
+
+The certificate infrastructure has two main parts: the certificate server,
+or Certificate Authority, and the clients that want certificates. The CA is
+responsible for issuing certificates to participating entities.
+
+To issue a certificate, the CA has to verify the identity and the assertions
+in the certificate. The client starts off by making a request to the CA for
+a certificate. This request is called a Certificate Signing Request or CSR
+(PKCS#10).
+
+When a CSR arrives at the CA, the CA will decode it and verify that all of
+its fields are in line with what the system expects. Since there are many
+possible ways to construct an X.509 certificate, we rely on PKI profiles.
+
+Generally, PKI profiles are policy documents or general guidelines that are
+followed by the requester and the CA. However, most commonly available PKI
+profiles are general purpose and offer too much surface area.
+
+The SCM CA infrastructure supports the notion of a PKI profile class which
+can codify the RDNs, Extensions and other certificate policies. When issuing
+a certificate, the CA will invoke a certificate approver class based on the
+authentication method used. For example, out of the box, we support manual,
+Kerberos, trusted network and testing authentication mechanisms.
+
+If there is no authentication mechanism in place, then when the CA receives
+the CSR, it runs the standard PKI profile over it to verify that all the
+fields are in expected ranges. Once that is done, the signing request is
+sent for human review and approval. This form of certificate approval is
+called Manual. Of all the certificate approval processes this is the
+** most secure **. This approval needs to be done once for each data node.
+
+For existing clusters, where data nodes already have a Kerberos keytab, we
+can leverage the Kerberos identity mechanism to identify the data node that
+is requesting the certificate. In this case, users can configure the system
+to leverage Kerberos while issuing certificates, and the SCM CA will be able
+to verify the data node's identity and issue certificates automatically.
+
+In environments like Kubernetes, we can leverage the base system services to
+pass on a shared secret securely. In this model too, we can rely on these
+secrets to make sure that it is the right data node that is talking to us.
+This kind of approval is called Trusted network approval. In this process,
+each data node not only sends the CSR but signs the request with a secret
+shared with SCM. SCM can then issue a certificate without the intervention
+of a human administrator.
+
+The last method, TESTING, which should never be used outside of development
+and testing clusters, is merely a mechanism to bypass all identity checks.
+If this flag is set, the CA will sign a CSR as long as the base profile
+approves all of its fields.
+
+ * Please do not use this mechanism (TESTING) for any purpose other than
+ * testing.
+
+CA - Certificate Approval and Code Layout (as of Dec 1st, 2018)
+=================================================================
+The CA implementation (as of now called DefaultCA) receives a CSR from the
+network layer. The network layer also tells the system which approver type
+to use; that is, if the Kerberos or shared-secret mechanism was used, it
+reports that to the DefaultCA.
+
+The default CA instantiates the approver based on the approver type
+indicated by the network layer. This approver creates an instance of the
+PKI profile and passes it each field from the certificate signing request.
+The PKI profile (as of today, Dec 1st, 2018, we have one profile, called
+the Ozone profile) verifies that each field in the CSR meets the approved
+set of values.
+
+Once the PKI profile validates the request, it is either auto-approved or
+queued for manual review.
+
+ */

+ 56 - 0
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java

@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.PKIProfile;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+
+import java.io.IOException;
+import java.security.PrivateKey;
+import java.util.Date;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * A test approver class that makes testing easier.
+ */
+public class MockApprover extends BaseApprover {
+
+  public MockApprover(PKIProfile pkiProfile, SecurityConfig config) {
+    super(pkiProfile, config);
+  }
+
+  @Override
+  public CompletableFuture<X509CertificateHolder>
+  approve(PKCS10CertificationRequest csr) {
+    return super.approve(csr);
+  }
+
+  @Override
+  public X509CertificateHolder sign(SecurityConfig config, PrivateKey caPrivate,
+                                    X509CertificateHolder caCertificate, Date validFrom,
+                                    Date validTill, PKCS10CertificationRequest certificationRequest)
+      throws IOException, OperatorCreationException {
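+    // This mock never signs anything; the real signing path is exercised
+    // through DefaultApprover.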
+    return null;
+  }
+
+}

+ 51 - 0
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java

@@ -23,14 +23,22 @@ import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
+import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
 import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
 import java.io.IOException;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
 import java.security.cert.CertificateException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
 import java.util.function.Consumer;
 
 import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
@@ -113,6 +121,49 @@ public class TestDefaultCAServer {
       // exception.
       assertTrue(e.toString().contains("Missing Keys"));
     }
+  }
+
+  /**
+   * The most important test of this suite. It verifies that we are able to
+   * create a test CA, have it create its own self-signed CA certificate,
+   * and then issue a certificate based on a CSR.
+   * @throws IOException - on ERROR.
+   * @throws ExecutionException - on ERROR.
+   * @throws InterruptedException - on ERROR.
+   * @throws NoSuchProviderException - on ERROR.
+   * @throws NoSuchAlgorithmException - on ERROR.
+   */
+  @Test
+  public void testRequestCertificate() throws IOException,
+      ExecutionException, InterruptedException,
+      NoSuchProviderException, NoSuchAlgorithmException {
+    KeyPair keyPair =
+        new HDDSKeyGenerator(conf).generateKey();
+    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+        .addDnsName("hadoop.apache.org")
+        .addIpAddress("8.8.8.8")
+        .setCA(false)
+        .setClusterID("ClusterID")
+        .setScmID("SCMID")
+        .setSubject("Ozone Cluster")
+        .setConfiguration(conf)
+        .setKey(keyPair)
+        .build();
 
+    // Let us convert this to a string to mimic the common use case.
+    String csrString = CertificateSignRequest.getEncodedString(csr);
+
+    CertificateServer testCA = new DefaultCAServer("testCA",
+        RandomStringUtils.randomAlphabetic(4),
+        RandomStringUtils.randomAlphabetic(4));
+    testCA.init(new SecurityConfig(conf),
+        CertificateServer.CAType.SELF_SIGNED_CA);
+
+    Future<X509CertificateHolder> holder = testCA.requestCertificate(csrString,
+        CertificateApprover.ApprovalType.TESTING_AUTOMATIC);
+    // Right now our calls are synchronous. Eventually this will have to wait.
+    assertTrue(holder.isDone());
+    assertNotNull(holder.get());
   }
+
 }

+ 361 - 0
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java

@@ -0,0 +1,361 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.hadoop.hdds.security.x509.certificate.authority;
+
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.PKIProfiles.DefaultProfile;
+import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
+import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator;
+import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.asn1.x500.X500NameBuilder;
+import org.bouncycastle.asn1.x500.style.BCStyle;
+import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.Extensions;
+import org.bouncycastle.asn1.x509.ExtensionsGenerator;
+import org.bouncycastle.asn1.x509.GeneralName;
+import org.bouncycastle.asn1.x509.GeneralNames;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
+import org.bouncycastle.pkcs.PKCS10CertificationRequest;
+import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder;
+import org.bouncycastle.pkcs.PKCSException;
+import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.IOException;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.NoSuchProviderException;
+
+import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Tests for the default PKI Profile.
+ */
+public class TestDefaultProfile {
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+  private OzoneConfiguration configuration;
+  private SecurityConfig securityConfig;
+  private DefaultProfile defaultProfile;
+  private MockApprover testApprover;
+  private KeyPair keyPair;
+
+  @Before
+  public void setUp() throws Exception {
+    configuration = new OzoneConfiguration();
+    configuration.set(OZONE_METADATA_DIRS,
+        temporaryFolder.newFolder().toString());
+    securityConfig = new SecurityConfig(configuration);
+    defaultProfile = new DefaultProfile();
+    testApprover = new MockApprover(defaultProfile,
+        securityConfig);
+    keyPair = new HDDSKeyGenerator(securityConfig).generateKey();
+  }
+
+  /**
+   * Tests the General Names that we support. The default profile supports only
+   * two names right now.
+   */
+  @Test
+  public void testIsSupportedGeneralName() {
+    // Positive tests
+    assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.iPAddress));
+    assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.dNSName));
+    // Negative tests
+    assertFalse(
+        defaultProfile.isSupportedGeneralName(GeneralName.directoryName));
+    assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.rfc822Name));
+    assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.otherName));
+  }
+
+  /**
+   * Test valid keys are validated correctly.
+   *
+   * @throws SCMSecurityException      - on Error.
+   * @throws PKCSException             - on Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testVerifyCertificate() throws SCMSecurityException,
+      PKCSException, OperatorCreationException {
+    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+        .addDnsName("hadoop.apache.org")
+        .addIpAddress("8.8.8.8")
+        .setCA(false)
+        .setClusterID("ClusterID")
+        .setScmID("SCMID")
+        .setSubject("Ozone Cluster")
+        .setConfiguration(configuration)
+        .setKey(keyPair)
+        .build();
+    assertTrue(testApprover.verifyPkcs10Request(csr));
+  }
+
+  /**
+   * Test invalid keys fail in the validation.
+   *
+   * @throws SCMSecurityException      - on Error.
+   * @throws PKCSException             - on Error.
+   * @throws OperatorCreationException - on Error.
+   * @throws NoSuchProviderException   - on Error.
+   * @throws NoSuchAlgorithmException  - on Error.
+   */
+  @Test
+  public void testVerifyCertificateInvalidKeys() throws SCMSecurityException,
+      PKCSException, OperatorCreationException,
+      NoSuchProviderException, NoSuchAlgorithmException {
+    KeyPair newKeyPair = new HDDSKeyGenerator(securityConfig).generateKey();
+    KeyPair wrongKey = new KeyPair(keyPair.getPublic(),
+        newKeyPair.getPrivate());
+    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+        .addDnsName("hadoop.apache.org")
+        .addIpAddress("8.8.8.8")
+        .setCA(false)
+        .setClusterID("ClusterID")
+        .setScmID("SCMID")
+        .setSubject("Ozone Cluster")
+        .setConfiguration(configuration)
+        .setKey(wrongKey)
+        .build();
+    // Signature verification should fail here, since the public/private key
+    // does not match.
+    assertFalse(testApprover.verifyPkcs10Request(csr));
+  }
+
+  /**
+   * Tests that normal valid extensions work with the default profile.
+   *
+   * @throws SCMSecurityException - on Error.
+   */
+  @Test
+  public void testExtensions() throws SCMSecurityException {
+    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+        .addDnsName("hadoop.apache.org")
+        .addIpAddress("192.10.234.6")
+        .setCA(false)
+        .setClusterID("ClusterID")
+        .setScmID("SCMID")
+        .setSubject("Ozone Cluster")
+        .setConfiguration(configuration)
+        .setKey(keyPair)
+        .build();
+    assertTrue(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Tests that invalid extensions cause a failure in validation. We fail
+   * the validation if the CA extension is enabled.
+   *
+   * @throws SCMSecurityException - on Error.
+   */
+  @Test
+  public void testInvalidExtensionsWithCA() throws SCMSecurityException {
+    PKCS10CertificationRequest csr = new CertificateSignRequest.Builder()
+        .addDnsName("hadoop.apache.org")
+        .addIpAddress("192.10.234.6")
+        .setCA(true)
+        .setClusterID("ClusterID")
+        .setScmID("SCMID")
+        .setSubject("Ozone Cluster")
+        .setConfiguration(configuration)
+        .setKey(keyPair)
+        .build();
+    assertFalse(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Tests that invalid extensions cause a failure in validation. We fail
+   * if rfc822-type names are added. We also add the extension in both
+   * critical and non-critical fashion to verify that we catch both cases.
+   *
+   * @throws IOException - on Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testInvalidExtensionsWithEmail() throws IOException,
+      OperatorCreationException {
+    Extensions emailExtension = getSANExtension(GeneralName.rfc822Name,
+        "bilbo@apache.org", false);
+    PKCS10CertificationRequest csr = getInvalidCSR(keyPair, emailExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+
+    emailExtension = getSANExtension(GeneralName.rfc822Name,
+        "bilbo@apache.org", true);
+    csr = getInvalidCSR(keyPair, emailExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Same test for URI.
+   * @throws IOException - On Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testInvalidExtensionsWithURI() throws IOException,
+      OperatorCreationException {
+    Extensions oExtension = getSANExtension(
+        GeneralName.uniformResourceIdentifier, "s3g.ozone.org", false);
+    PKCS10CertificationRequest csr = getInvalidCSR(keyPair, oExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+    // Repeat with the extension marked critical; it must still be rejected.
+    oExtension = getSANExtension(GeneralName.uniformResourceIdentifier,
+        "s3g.ozone.org", true);
+    csr = getInvalidCSR(keyPair, oExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Assert that if DNS is marked critical our PKI profile will reject it.
+   * @throws IOException - on Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testInvalidExtensionsWithCriticalDNS() throws IOException,
+      OperatorCreationException {
+    Extensions dnsExtension = getSANExtension(GeneralName.dNSName,
+        "ozone.hadoop.org",
+        true);
+    PKCS10CertificationRequest csr = getInvalidCSR(keyPair, dnsExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+    // This case should pass, hence the assertTrue below.
+    dnsExtension = getSANExtension(GeneralName.dNSName,
+        "ozone.hadoop.org",
+        false);
+    csr = getInvalidCSR(keyPair, dnsExtension);
+    assertTrue(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Verify that valid Extended Key usage works as expected.
+   * @throws IOException - on Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testValidExtendedKeyUsage() throws IOException,
+      OperatorCreationException {
+    Extensions extendedExtension =
+        getKeyUsageExtension(KeyPurposeId.id_kp_clientAuth, false);
+    PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
+    assertTrue(testApprover.verfiyExtensions(csr));
+
+    extendedExtension =
+        getKeyUsageExtension(KeyPurposeId.id_kp_serverAuth, false);
+    csr = getInvalidCSR(keyPair, extendedExtension);
+    assertTrue(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Verify that an invalid Extended Key Usage is rejected as expected.
+   * @throws IOException - on Error.
+   * @throws OperatorCreationException - on Error.
+   */
+  @Test
+  public void testInValidExtendedKeyUsage() throws IOException,
+      OperatorCreationException {
+    Extensions extendedExtension =
+        getKeyUsageExtension(KeyPurposeId.id_kp_clientAuth, true);
+    PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+
+    extendedExtension =
+        getKeyUsageExtension(KeyPurposeId.id_kp_OCSPSigning, false);
+    csr = getInvalidCSR(keyPair, extendedExtension);
+    assertFalse(testApprover.verfiyExtensions(csr));
+  }
+
+  /**
+   * Generates a CSR with the specified extensions. This function is used to
+   * get an invalid CSR and test that the PKI profile rejects those invalid
+   * extensions; hence the function name. By itself the result is a
+   * well-formed CSR, but our PKI profile will treat it as invalid.
+   *
+   * @param keyPair - Key Pair.
+   * @param extensions - Extensions to embed in the CSR.
+   * @return CSR - PKCS10CertificationRequest
+   * @throws OperatorCreationException - on Error.
+   */
+  private PKCS10CertificationRequest getInvalidCSR(KeyPair keyPair,
+      Extensions extensions) throws OperatorCreationException {
+    X500NameBuilder namebuilder =
+        new X500NameBuilder(X500Name.getDefaultStyle());
+    namebuilder.addRDN(BCStyle.CN, "invalidCert");
+    PKCS10CertificationRequestBuilder p10Builder =
+        new JcaPKCS10CertificationRequestBuilder(namebuilder.build(),
+            keyPair.getPublic());
+    p10Builder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest,
+        extensions);
+    JcaContentSignerBuilder csBuilder =
+        new JcaContentSignerBuilder(this.securityConfig.getSignatureAlgo());
+    ContentSigner signer = csBuilder.build(keyPair.getPrivate());
+    return p10Builder.build(signer);
+  }
+
+  /**
+   * Generates a Subject Alternative Name extension.
+   * @param extensionCode - GeneralName tag to use, for example rfc822Name.
+   * @param value - value to be added to the certificate.
+   * @param critical - boolean value that marks the extension as critical.
+   * @return - An Extensions object containing the SAN entry.
+   * @throws IOException - on Error.
+   */
+  private Extensions getSANExtension(int extensionCode, String value,
+      boolean critical) throws IOException {
+    GeneralName extn = new GeneralName(extensionCode,
+        value);
+    ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
+    extensionsGenerator.addExtension(Extension.subjectAlternativeName, critical,
+        new GeneralNames(extn));
+    return extensionsGenerator.generate();
+  }
+
+  /**
+   * Returns an Extensions object with Extended Key Usage.
+   * @param purposeId - Usage that we want to encode.
+   * @param critical - makes the extension critical.
+   * @return Extensions.
+   * @throws IOException - on Error.
+   */
+  private Extensions getKeyUsageExtension(KeyPurposeId purposeId,
+      boolean critical) throws IOException {
+    ExtendedKeyUsage extendedKeyUsage = new ExtendedKeyUsage(purposeId);
+    ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
+    extensionsGenerator.addExtension(
+        Extension.extendedKeyUsage, critical, extendedKeyUsage);
+    return extensionsGenerator.generate();
+  }
+}

+ 0 - 6
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java

@@ -99,11 +99,6 @@ public class TestCertificateSignRequest {
     Assert.assertEquals(1, csr.getAttributes().length);
     Extensions extensions = SecurityUtil.getPkcs9Extensions(csr);
 
-    // Verify basic constraints extension
-    Extension basicExt = extensions.getExtension(Extension
-        .basicConstraints);
-    Assert.assertEquals(true, basicExt.isCritical());
-
     // Verify key usage extension
     Extension keyUsageExt = extensions.getExtension(Extension.keyUsage);
     Assert.assertEquals(true, keyUsageExt.isCritical());
@@ -144,7 +139,6 @@ public class TestCertificateSignRequest {
     builder.addIpAddress("192.168.2.1");
 
     builder.addDnsName("dn1.abc.com");
-    builder.addRfc822Name("test@abc.com");
 
     PKCS10CertificationRequest csr = builder.build();
 

+ 17 - 2
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/keys/TestKeyCodec.java

@@ -20,6 +20,7 @@
 package org.apache.hadoop.hdds.security.x509.keys;
 
 import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_METADATA_DIR_NAME;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -127,7 +128,7 @@ public class TestKeyCodec {
     byte[] keyBytes = Base64.decodeBase64(privateKeydata);
     PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
     PrivateKey privateKeyDecoded = kf.generatePrivate(spec);
-    Assert.assertNotNull("Private Key should not be null",
+    assertNotNull("Private Key should not be null",
         privateKeyDecoded);
 
     // Let us decode the public key and veriy that we can parse it back into
@@ -140,7 +141,7 @@ public class TestKeyCodec {
     keyBytes = Base64.decodeBase64(publicKeydata);
     X509EncodedKeySpec pubKeyspec = new X509EncodedKeySpec(keyBytes);
     PublicKey publicKeyDecoded = kf.generatePublic(pubKeyspec);
-    Assert.assertNotNull("Public Key should not be null",
+    assertNotNull("Public Key should not be null",
         publicKeyDecoded);
 
     // Now let us assert the permissions on the Directories and files are as
@@ -213,4 +214,18 @@ public class TestKeyCodec {
         .intercept(IOException.class, "Unsupported File System for pem file.",
             () -> pemWriter.writeKey(kp));
   }
+
+  @Test
+  public void testReadWritePublicKeyWithoutArgs()
+      throws NoSuchProviderException, NoSuchAlgorithmException, IOException,
+      InvalidKeySpecException {
+    KeyPair kp = keyGenerator.generateKey();
+    KeyCodec keycodec = new KeyCodec(configuration);
+    keycodec.writeKey(kp);
+
+    PublicKey pubKey = keycodec.readPublicKey();
+    assertNotNull(pubKey);
+  }
 }