
HDDS-955. SCM CA: Add CA to SCM.
Contributed by Anu Engineer.

Anu Engineer, 6 years ago
Parent
Current commit
30bfc9cbd0
14 files changed, 152 insertions(+), 67 deletions(-)
  1. +3 -3
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/BaseApprover.java
  2. +5 -5
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateApprover.java
  3. +4 -4
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultApprover.java
  4. +5 -3
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java
  5. +2 -1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java
  6. +2 -2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/package-info.java
  7. +5 -4
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java
  8. +1 -1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java
  9. +18 -15
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java
  10. +23 -0
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/package-info.java
  11. +3 -2
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java
  12. +2 -2
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java
  13. +59 -25
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
  14. +20 -0
      hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/package-info.java

+ 3 - 3
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/BaseApprover.java

@@ -173,9 +173,9 @@ public abstract class BaseApprover implements CertificateApprover {
    * {@inheritDoc}
    */
   @Override
-  public CompletableFuture<X509CertificateHolder> approve(String csr)
+  public CompletableFuture<X509CertificateHolder> inspectCSR(String csr)
       throws IOException {
-    return approve(CertificateSignRequest.getCertificationRequest(csr));
+    return inspectCSR(CertificateSignRequest.getCertificationRequest(csr));
   }
 
   /**
@@ -183,7 +183,7 @@ public abstract class BaseApprover implements CertificateApprover {
    */
   @Override
   public CompletableFuture<X509CertificateHolder>
-      approve(PKCS10CertificationRequest csr) {
+        inspectCSR(PKCS10CertificationRequest csr) {
     /**
      * The base approver executes the following algorithm to verify that a
      * CSR meets the PKI Profile criteria.

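The rename clarifies the contract: the approver only inspects a CSR against the PKI profile, and any violation surfaces through the returned future rather than as a thrown exception. A minimal caller sketch, assuming an `approver` instance from some concrete BaseApprover subclass and a PEM-encoded request string (the method and its error handling are illustrative, not part of this patch):

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import org.bouncycastle.cert.X509CertificateHolder;

// Sketch only: CertificateApprover is package-private, so a real caller
// would live in the same package. Profile violations complete the future
// exceptionally instead of throwing inline.
static void inspect(CertificateApprover approver, String pemCsr)
    throws IOException {
  CompletableFuture<X509CertificateHolder> future =
      approver.inspectCSR(pemCsr);
  future.whenComplete((holder, err) -> {
    if (err != null) {
      System.err.println("CSR rejected by PKI profile: " + err.getMessage());
    }
  });
}
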
+ 5 - 5
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/CertificateApprover.java

@@ -30,7 +30,7 @@ import java.util.Date;
 import java.util.concurrent.CompletableFuture;
 
 /**
- * Certificate Approver interface is used to approve a certificate.
+ * Certificate Approver interface is used to inspectCSR a certificate.
  */
 interface CertificateApprover {
   /**
@@ -40,7 +40,7 @@ interface CertificateApprover {
    * @return - Future that will be contain the certificate or exception.
    */
   CompletableFuture<X509CertificateHolder>
-      approve(PKCS10CertificationRequest csr);
+      inspectCSR(PKCS10CertificationRequest csr);
 
   /**
    * Approves a Certificate Request based on the policies of this approver.
@@ -50,14 +50,14 @@ interface CertificateApprover {
    * @throws IOException - On Error.
    */
   CompletableFuture<X509CertificateHolder>
-      approve(String csr) throws IOException;
+      inspectCSR(String csr) throws IOException;
 
   /**
    * Sign function signs a Certificate.
    * @param config - Security Config.
    * @param caPrivate - CAs private Key.
    * @param caCertificate - CA Certificate.
-   * @param validFrom - Begin Da te
+   * @param validFrom - Begin Date
    * @param validTill - End Date
    * @param certificationRequest - Certification Request.
    * @return Signed Certificate.
@@ -79,7 +79,7 @@ interface CertificateApprover {
    */
   enum ApprovalType {
     KERBEROS_TRUSTED, /* The Request came from a DN using Kerberos Identity*/
-    MANUAL, /* Wait for a Human being to approve this certificate */
+    MANUAL, /* Wait for a Human being to inspect CSR of this certificate */
     TESTING_AUTOMATIC /* For testing purpose, Automatic Approval. */
   }
 

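The interface's sign() method above is what actually issues a certificate once inspection passes. A hedged BouncyCastle sketch of what an implementation might look like; the serial number and SHA256withRSA algorithm are illustrative assumptions, not the project's actual defaults:

import java.io.IOException;
import java.math.BigInteger;
import java.security.PrivateKey;
import java.util.Date;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.X509v3CertificateBuilder;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;

// Hedged sketch of a sign() implementation with BouncyCastle.
static X509CertificateHolder sign(PrivateKey caPrivate,
    X509CertificateHolder caCertificate, Date validFrom, Date validTill,
    PKCS10CertificationRequest csr)
    throws IOException, OperatorCreationException {
  X509v3CertificateBuilder builder = new X509v3CertificateBuilder(
      caCertificate.getSubject(),                     // issuer == CA subject
      BigInteger.valueOf(System.currentTimeMillis()), // illustrative serial
      validFrom, validTill,
      csr.getSubject(),
      csr.getSubjectPublicKeyInfo());
  ContentSigner signer =
      new JcaContentSignerBuilder("SHA256withRSA").build(caPrivate);
  return builder.build(signer);
}
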
+ 4 - 4
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultApprover.java

@@ -115,14 +115,14 @@ public class DefaultApprover extends BaseApprover {
   }
 
   @Override
-  public CompletableFuture<X509CertificateHolder> approve(String csr)
+  public CompletableFuture<X509CertificateHolder> inspectCSR(String csr)
       throws IOException {
-    return super.approve(csr);
+    return super.inspectCSR(csr);
   }
 
   @Override
   public CompletableFuture<X509CertificateHolder>
-      approve(PKCS10CertificationRequest csr) {
-    return super.approve(csr);
+      inspectCSR(PKCS10CertificationRequest csr) {
+    return super.inspectCSR(csr);
   }
 }

+ 5 - 3
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/authority/DefaultCAServer.java

@@ -179,14 +179,15 @@ public class DefaultCAServer implements CertificateServer {
 
   @Override
   public Future<X509CertificateHolder> requestCertificate(
-      PKCS10CertificationRequest csr, CertificateApprover.ApprovalType approverType) {
+      PKCS10CertificationRequest csr,
+      CertificateApprover.ApprovalType approverType) {
     LocalDate beginDate = LocalDate.now().atStartOfDay().toLocalDate();
     LocalDateTime temp = LocalDateTime.of(beginDate, LocalTime.MIDNIGHT);
     LocalDate endDate =
         temp.plus(config.getDefaultCertDuration()).toLocalDate();
 
     CompletableFuture<X509CertificateHolder> xcertHolder =
-        approver.approve(csr);
+        approver.inspectCSR(csr);
 
     if(xcertHolder.isCompletedExceptionally()) {
       // This means that approver told us there are things which it disagrees
@@ -227,7 +228,8 @@ public class DefaultCAServer implements CertificateServer {
 
   @Override
   public Future<Boolean> revokeCertificate(X509Certificate certificate,
-      CertificateApprover.ApprovalType approverType) throws SCMSecurityException {
+      CertificateApprover.ApprovalType approverType)
+      throws SCMSecurityException {
     return null;
   }
 

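requestCertificate computes a validity window starting at today's midnight and extending by the configured default certificate duration, then delegates inspection to the approver. A sketch of a call site; the `caServer` instance is assumed, and TESTING_AUTOMATIC mirrors the test flow, while production requests would pass KERBEROS_TRUSTED or MANUAL:

import java.util.concurrent.Future;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;

// Call-site sketch; assumes the same package as the (package-private)
// CertificateApprover so the ApprovalType enum is visible.
static Future<X509CertificateHolder> request(CertificateServer caServer,
    PKCS10CertificationRequest csr) {
  return caServer.requestCertificate(csr,
      CertificateApprover.ApprovalType.TESTING_AUTOMATIC);
}
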
+ 2 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/CertificateSignRequest.java

@@ -75,7 +75,8 @@ public final class CertificateSignRequest {
    * @param extensions - CSR extensions
    */
   private CertificateSignRequest(String subject, String scmID, String clusterID,
-                                 KeyPair keyPair, SecurityConfig config, Extensions extensions) {
+                                 KeyPair keyPair, SecurityConfig config,
+                                 Extensions extensions) {
     this.subject = subject;
     this.clusterID = clusterID;
     this.scmID = scmID;

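For reference, the builder wraps exactly these constructor parameters. A sketch of building a CSR: the setClusterID/setKey/setConfiguration calls appear in the TestCertificateSignRequest diff later in this commit, while the Builder class name and the setSubject/setScmID setters are assumed here for the remaining fields:

import java.security.KeyPair;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.certificates.utils.CertificateSignRequest;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;

// Hedged sketch; setter names beyond those shown in the test diff are
// assumptions for illustration.
static PKCS10CertificationRequest buildCsr(KeyPair keyPair, String scmId,
    String clusterId, OzoneConfiguration conf) throws Exception {
  return new CertificateSignRequest.Builder()
      .setSubject("testCA")
      .setScmID(scmId)
      .setClusterID(clusterId)
      .setKey(keyPair)
      .setConfiguration(conf)
      .build();
}
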
+ 2 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificates/utils/package-info.java

@@ -17,6 +17,6 @@
  *
  */
 /**
- * Utils for Certificates.
+ Helpers for Certificates.
  */
-package org.apache.hadoop.hdds.security.x509.certificates.utils;
+package org.apache.hadoop.hdds.security.x509.certificates.utils;

+ 5 - 4
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/MockApprover.java

@@ -41,14 +41,15 @@ public class MockApprover extends BaseApprover {
 
   @Override
   public CompletableFuture<X509CertificateHolder>
-  approve(PKCS10CertificationRequest csr) {
-    return super.approve(csr);
+      inspectCSR(PKCS10CertificationRequest csr) {
+    return super.inspectCSR(csr);
   }
 
   @Override
   public X509CertificateHolder sign(SecurityConfig config, PrivateKey caPrivate,
-                                    X509CertificateHolder caCertificate, Date validFrom,
-                                    Date validTill, PKCS10CertificationRequest certificationRequest)
+                                    X509CertificateHolder caCertificate,
+                                    Date validFrom, Date validTill,
+                                    PKCS10CertificationRequest request)
       throws IOException, OperatorCreationException {
     return null;
   }

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultCAServer.java

@@ -166,4 +166,4 @@ public class TestDefaultCAServer {
     assertNotNull(holder.get());
   }
 
-}
+}

+ 18 - 15
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/authority/TestDefaultProfile.java

@@ -92,7 +92,8 @@ public class TestDefaultProfile {
     assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.iPAddress));
     assertTrue(defaultProfile.isSupportedGeneralName(GeneralName.dNSName));
 // Negative Tests
-    assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.directoryName));
+    assertFalse(defaultProfile.isSupportedGeneralName(
+        GeneralName.directoryName));
     assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.rfc822Name));
     assertFalse(defaultProfile.isSupportedGeneralName(GeneralName.otherName));
   }
@@ -207,16 +208,16 @@ public class TestDefaultProfile {
    */
 
   @Test
-  public void testInvalidExtensionsWithEmail() throws IOException,
-      OperatorCreationException {
-    Extensions emailExtension = getSANExtension(GeneralName.rfc822Name,"bilbo" +
-        "@apache.org", false);
+  public void testInvalidExtensionsWithEmail()
+      throws IOException, OperatorCreationException {
+    Extensions emailExtension = getSANExtension(GeneralName.rfc822Name,
+        "bilbo@apache.org", false);
     PKCS10CertificationRequest csr = getInvalidCSR(keyPair, emailExtension);
     assertFalse(testApprover.verfiyExtensions(csr));
 
-    emailExtension = getSANExtension(GeneralName.rfc822Name,"bilbo" +
+    emailExtension = getSANExtension(GeneralName.rfc822Name, "bilbo" +
         "@apache.org", true);
-     csr = getInvalidCSR(keyPair, emailExtension);
+    csr = getInvalidCSR(keyPair, emailExtension);
     assertFalse(testApprover.verfiyExtensions(csr));
 
   }
@@ -230,7 +231,7 @@ public class TestDefaultProfile {
   public void testInvalidExtensionsWithURI() throws IOException,
       OperatorCreationException {
     Extensions oExtension = getSANExtension(
-        GeneralName.uniformResourceIdentifier,"s3g.ozone.org", false);
+        GeneralName.uniformResourceIdentifier, "s3g.ozone.org", false);
     PKCS10CertificationRequest csr = getInvalidCSR(keyPair, oExtension);
     assertFalse(testApprover.verfiyExtensions(csr));
     oExtension = getSANExtension(GeneralName.uniformResourceIdentifier,
@@ -274,7 +275,8 @@ public class TestDefaultProfile {
     PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
     assertTrue(testApprover.verfiyExtensions(csr));
 
-    extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_serverAuth, false);
+    extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_serverAuth,
+        false);
     csr = getInvalidCSR(keyPair, extendedExtension);
     assertTrue(testApprover.verfiyExtensions(csr));
   }
@@ -293,7 +295,8 @@ public class TestDefaultProfile {
     PKCS10CertificationRequest csr = getInvalidCSR(keyPair, extendedExtension);
     assertFalse(testApprover.verfiyExtensions(csr));
 
-    extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_OCSPSigning, false);
+    extendedExtension = getKeyUsageExtension(KeyPurposeId.id_kp_OCSPSigning,
+        false);
     csr = getInvalidCSR(keyPair, extendedExtension);
     assertFalse(testApprover.verfiyExtensions(csr));
   }
@@ -306,11 +309,11 @@ public class TestDefaultProfile {
    * rejects these invalid extensions, Hence the function name, by itself it
    * is a well formed CSR, but our PKI profile will treat it as invalid CSR.
    *
-   * @param keyPair - Key Pair.
+   * @param kPair - Key Pair.
    * @return CSR  - PKCS10CertificationRequest
    * @throws OperatorCreationException - on Error.
    */
-  private PKCS10CertificationRequest getInvalidCSR(KeyPair keyPair,
+  private PKCS10CertificationRequest getInvalidCSR(KeyPair kPair,
       Extensions extensions) throws OperatorCreationException {
     X500NameBuilder namebuilder =
         new X500NameBuilder(X500Name.getDefaultStyle());
@@ -355,7 +358,7 @@ public class TestDefaultProfile {
     ExtendedKeyUsage extendedKeyUsage = new ExtendedKeyUsage(purposeId);
     ExtensionsGenerator extensionsGenerator = new ExtensionsGenerator();
     extensionsGenerator.addExtension(
-        Extension.extendedKeyUsage,critical, extendedKeyUsage);
-     return extensionsGenerator.generate();
+        Extension.extendedKeyUsage, critical, extendedKeyUsage);
+    return extensionsGenerator.generate();
   }
-}
+}

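These tests lean on a getSANExtension helper that is not part of this hunk. A sketch of what such a helper typically does with BouncyCastle, wrapping a single GeneralName into a subjectAlternativeName extension (the helper itself is assumed, the API calls are standard BouncyCastle):

import java.io.IOException;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.asn1.x509.ExtensionsGenerator;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;

// Builds a SAN extension for one name, e.g. sanExtension(
// GeneralName.dNSName, "scm.example.org", false).
static Extensions sanExtension(int tag, String name, boolean critical)
    throws IOException {
  GeneralNames names = new GeneralNames(new GeneralName(tag, name));
  ExtensionsGenerator generator = new ExtensionsGenerator();
  generator.addExtension(Extension.subjectAlternativeName, critical, names);
  return generator.generate();
}
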
+ 23 - 0
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/package-info.java

@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/**
+ Tests for Certificate helpers.
+ */
+package org.apache.hadoop.hdds.security.x509.certificate.utils;
+

+ 3 - 2
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestCertificateSignRequest.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.hdds.security.x509.certificate.utils;
+package org.apache.hadoop.hdds.security.x509.certificates;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
@@ -274,6 +274,7 @@ public class TestCertificateSignRequest {
             .setClusterID(clusterID)
             .setKey(keyPair)
             .setConfiguration(conf);
+
     PKCS10CertificationRequest csr = builder.build();
     byte[] csrBytes = csr.getEncoded();
 
@@ -281,4 +282,4 @@ public class TestCertificateSignRequest {
     PKCS10CertificationRequest dsCsr = new PKCS10CertificationRequest(csrBytes);
     Assert.assertEquals(csr, dsCsr);
   }
-}
+}

+ 2 - 2
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/certificates/TestRootCertificate.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.hdds.security.x509.certificate.utils;
+package org.apache.hadoop.hdds.security.x509.certificates;
 
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
@@ -255,4 +255,4 @@ public class TestRootCertificate {
     // Assert that we can create a certificate with all sane params.
     Assert.assertNotNull(builder.build());
   }
-}
+}

+ 59 - 25
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java

@@ -33,8 +33,8 @@ import org.apache.hadoop.hdds.HddsUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
-import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.hdds.scm.HddsServerUtil;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.hdds.scm.block.BlockManager;
 import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
 import org.apache.hadoop.hdds.scm.block.DeletedBlockLogImpl;
@@ -68,46 +68,49 @@ import org.apache.hadoop.hdds.scm.node.NodeReportHandler;
 import org.apache.hadoop.hdds.scm.node.SCMNodeManager;
 import org.apache.hadoop.hdds.scm.node.StaleNodeHandler;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineActionHandler;
-import org.apache.hadoop.hdds.scm.pipeline.SCMPipelineManager;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
 import org.apache.hadoop.hdds.scm.pipeline.PipelineReportHandler;
+import org.apache.hadoop.hdds.scm.pipeline.SCMPipelineManager;
+import org.apache.hadoop.hdds.security.x509.SecurityConfig;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateServer;
+import org.apache.hadoop.hdds.security.x509.certificate.authority.DefaultCAServer;
 import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.hdds.server.events.EventQueue;
-import org.apache.hadoop.ozone.OzoneSecurityUtil;
-import org.apache.hadoop.ozone.protocol.commands.RetriableDatanodeEventWatcher;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
+import org.apache.hadoop.ozone.OzoneSecurityUtil;
 import org.apache.hadoop.ozone.common.Storage.StorageState;
 import org.apache.hadoop.ozone.common.StorageInfo;
 import org.apache.hadoop.ozone.lease.LeaseManager;
+import org.apache.hadoop.ozone.protocol.commands.RetriableDatanodeEventWatcher;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.JvmPauseMonitor;
 import org.apache.hadoop.util.StringUtils;
 
-import static org.apache.hadoop.hdds.scm.ScmConfigKeys
-    .HDDS_SCM_WATCHER_TIMEOUT_DEFAULT;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.management.ObjectName;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.TimeUnit;
-
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_WATCHER_TIMEOUT_DEFAULT;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
 import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_PRINCIPAL_KEY;
 import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_SCM_KERBEROS_KEYTAB_FILE_KEY;
@@ -192,14 +195,15 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
 
   private final ReplicationActivityStatus replicationStatus;
   private final SCMChillModeManager scmChillModeManager;
+  private final CertificateServer certificateServer;
 
   private JvmPauseMonitor jvmPauseMonitor;
   private final OzoneConfiguration configuration;
 
   /**
-   * Creates a new StorageContainerManager. Configuration will be updated
-   * with information on the
-   * actual listening addresses used for RPC servers.
+   * Creates a new StorageContainerManager. Configuration will be
+   * updated with information on the actual listening addresses used
+   * for RPC servers.
    *
    * @param conf configuration
    */
@@ -209,17 +213,29 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
     configuration = conf;
     StorageContainerManager.initMetrics();
     initContainerReportCache(conf);
-    // Authenticate SCM if security is enabled
-    if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
-      loginAsSCMUser(conf);
-    }
-
     scmStorage = new SCMStorage(conf);
     if (scmStorage.getState() != StorageState.INITIALIZED) {
       throw new SCMException("SCM not initialized.", ResultCodes
           .SCM_NOT_INITIALIZED);
     }
 
+    // Authenticate SCM if security is enabled
+    if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
+      loginAsSCMUser(conf);
+      certificateServer = initializeCertificateServer(
+          getScmStorage().getClusterID(), getScmStorage().getScmId());
+      // TODO: Support Intermediary CAs in future.
+      certificateServer.init(new SecurityConfig(conf),
+          CertificateServer.CAType.SELF_SIGNED_CA);
+    } else {
+      // if no Security, we do not create a Certificate Server at all.
+      // This allows user to boot SCM without security temporarily
+      // and then come back and enable it without any impact.
+      certificateServer = null;
+    }
+
+
+
     eventQueue = new EventQueue();
 
     scmNodeManager = new SCMNodeManager(
@@ -362,15 +378,33 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
     LOG.info("SCM login successful.");
   }
 
+
+  /**
+   * This function creates/initializes a certificate server as needed.
+   * This function is idempotent, so calling this again and again after the
+   * server is initialized is not a problem.
+   *
+   * @param clusterID - Cluster ID
+   * @param scmID     - SCM ID
+   */
+  private CertificateServer initializeCertificateServer(String clusterID,
+      String scmID) throws IOException {
+    // TODO: Support Certificate Server loading via Class Name loader.
+    // So it is easy to use different Certificate Servers if needed.
+    String subject = "scm@" + InetAddress.getLocalHost().getHostName();
+    return new DefaultCAServer(subject, clusterID, scmID);
+
+  }
+
   /**
    * Builds a message for logging startup information about an RPC server.
    *
    * @param description RPC server description
-   * @param addr RPC server listening address
+   * @param addr        RPC server listening address
    * @return server startup message
    */
   public static String buildRpcServerStartMessage(String description,
-      InetSocketAddress addr) {
+                                                  InetSocketAddress addr) {
     return addr != null
         ? String.format("%s is listening at %s", description, addr.toString())
         : String.format("%s not started", description);
@@ -445,7 +479,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
 
   /**
    * Create an SCM instance based on the supplied command-line arguments.
-   *
+   * <p>
    * This method is intended for unit tests only. It suppresses the
    * startup/shutdown message and skips registering Unix signal
    * handlers.
@@ -465,8 +499,8 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
   /**
    * Create an SCM instance based on the supplied command-line arguments.
    *
-   * @param args command-line arguments.
-   * @param conf HDDS configuration
+   * @param args        command-line arguments.
+   * @param conf        HDDS configuration
    * @param printBanner if true, then log a verbose startup message.
    * @return SCM instance
    * @throws IOException, AuthenticationException
@@ -736,7 +770,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
     LOG.info(buildRpcServerStartMessage("ScmDatanodeProtocl RPC " +
         "server", getDatanodeProtocolServer().getDatanodeRpcAddress()));
     getDatanodeProtocolServer().start();
-    if(getSecurityProtocolServer() != null) {
+    if (getSecurityProtocolServer() != null) {
       getSecurityProtocolServer().start();
     }
 
@@ -853,7 +887,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
       getBlockProtocolServer().join();
       getClientProtocolServer().join();
       getDatanodeProtocolServer().join();
-      if(getSecurityProtocolServer() != null) {
+      if (getSecurityProtocolServer() != null) {
         getSecurityProtocolServer().join();
       }
     } catch (InterruptedException e) {
@@ -987,7 +1021,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
   /**
    * Returns EventPublisher.
    */
-  public EventPublisher getEventQueue(){
+  public EventPublisher getEventQueue() {
     return eventQueue;
   }
 
@@ -1007,7 +1041,7 @@ public final class StorageContainerManager extends ServiceRuntimeInfoImpl
   @Override
   public Map<String, Integer> getContainerStateCount() {
     Map<String, Integer> nodeStateCount = new HashMap<>();
-    for (HddsProtos.LifeCycleState state: HddsProtos.LifeCycleState.values()) {
+    for (HddsProtos.LifeCycleState state : HddsProtos.LifeCycleState.values()) {
       nodeStateCount.put(state.toString(), containerManager.getContainers(
           state).size());
     }

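Putting the constructor pieces together, the CA bootstrap logic added above reduces to the sketch below. The calls mirror the diff; the throws clause on init() is an assumption, since its signature is not shown in this commit:

import java.io.IOException;
import java.net.InetAddress;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateServer;
import org.apache.hadoop.hdds.security.x509.certificate.authority.DefaultCAServer;
import org.apache.hadoop.ozone.OzoneSecurityUtil;

// Condensed sketch of the constructor logic: the CA is created only when
// security is enabled, so an insecure SCM can later be reconfigured to
// secure mode without impact.
static CertificateServer bootstrapCa(OzoneConfiguration conf,
    String clusterId, String scmId) throws IOException {
  if (!OzoneSecurityUtil.isSecurityEnabled(conf)) {
    return null; // no CA at all in insecure mode
  }
  String subject = "scm@" + InetAddress.getLocalHost().getHostName();
  CertificateServer ca = new DefaultCAServer(subject, clusterId, scmId);
  // Intermediary CAs are a TODO in the patch; today it is always self-signed.
  ca.init(new SecurityConfig(conf), CertificateServer.CAType.SELF_SIGNED_CA);
  return ca;
}
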
+ 20 - 0
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/package-info.java

@@ -0,0 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Ozone Client tests.
+ */
+package org.apache.hadoop.ozone.client;