
HDFS-13405. Ozone: Rename HDSL to HDDS.
Contributed by Ajay Kumar, Elek Marton, Mukul Kumar Singh, Shashikant Banerjee and Anu Engineer.

Anu Engineer, 7 years ago
commit 8b832f3c35
100 changed files with 585 additions and 533 deletions
  1. +1 -1  hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml
  2. +4 -4  hadoop-cblock/server/pom.xml
  3. +7 -7  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java
  4. +2 -2  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java
  5. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java
  6. +3 -3  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java
  7. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java
  8. +2 -2  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java
  9. +2 -2  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java
  10. +2 -2  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java
  11. +10 -10  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java
  12. +4 -4  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java
  13. +2 -2  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java
  14. +4 -4  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java
  15. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java
  16. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java
  17. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java
  18. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java
  19. +1 -1  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java
  20. +6 -6  hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java
  21. +2 -2  hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto
  22. +6 -6  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java
  23. +10 -10  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java
  24. +2 -2  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java
  25. +2 -2  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java
  26. +6 -6  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java
  27. +1 -1  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java
  28. +1 -1  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java
  29. +12 -12  hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java
  30. +1 -1  hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java
  31. +1 -1  hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java
  32. +2 -2  hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
  33. +5 -5  hadoop-dist/pom.xml
  34. +1 -1  hadoop-dist/src/main/compose/cblock/docker-config
  35. +1 -1  hadoop-dist/src/main/compose/ozone/docker-config
  36. +6 -6  hadoop-hdds/client/pom.xml
  37. +10 -10  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java
  38. +4 -6  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java
  39. +3 -3  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java
  40. +21 -22  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java
  41. +2 -2  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java
  42. +12 -10  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java
  43. +23 -19  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java
  44. +68 -67  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java
  45. +1 -1  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
  46. +1 -1  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
  47. +9 -9  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java
  48. +11 -11  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java
  49. +1 -1  hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
  50. +1 -1  hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
  51. +6 -7  hadoop-hdds/common/pom.xml
  52. +6 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
  53. +17 -18  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
  54. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java
  55. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java
  56. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java
  57. +23 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java
  58. +7 -6  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java
  59. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java
  60. +2 -2  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java
  61. +7 -7  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java
  62. +2 -2  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java
  63. +13 -13  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java
  64. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java
  65. +7 -7  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java
  66. +11 -11  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
  67. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java
  68. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java
  69. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java
  70. +13 -13  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java
  71. +2 -3  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java
  72. +10 -10  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java
  73. +11 -11  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java
  74. +2 -2  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java
  75. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java
  76. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java
  77. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java
  78. +8 -8  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java
  79. +4 -4  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java
  80. +14 -13  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
  81. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java
  82. +27 -19  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java
  83. +3 -3  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java
  84. +40 -28  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java
  85. +4 -2  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java
  86. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java
  87. +24 -25  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java
  88. +1 -1  hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java
  89. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java
  90. +3 -4  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java
  91. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
  92. +2 -2  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java
  93. +5 -3  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java
  94. +3 -3  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java
  95. +7 -8  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
  96. +6 -7  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java
  97. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java
  98. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java
  99. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java
  100. +0 -0  hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java
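
Nearly all of the diff below is the mechanical half of this rename: the hadoop-hdsl Maven modules become hadoop-hdds, the org.apache.hadoop.hdsl and org.apache.hadoop.scm Java packages move under org.apache.hadoop.hdds, generated protobuf classes such as HdslProtos become HddsProtos, and shell variables such as HDSL_DIR become HDDS_DIR. The commit does not record how the rename was produced; purely as an illustrative sketch, a rename of this shape could be scripted roughly as below (the file globs and substitution list are assumptions, and cases such as OzoneClientUtils -> HddsClientUtils or the directory moves themselves would still need manual git mv work):

    # Hypothetical sketch only -- not the tooling actually used for this commit.
    find . -type f \( -name '*.java' -o -name '*.proto' \
                      -o -name '*.xml' -o -name '*.sh' \) -print0 |
      xargs -0 sed -i \
        -e 's/org\.apache\.hadoop\.scm/org.apache.hadoop.hdds.scm/g' \
        -e 's/hadoop\.hdsl/hadoop.hdds/g' \
        -e 's/hdsl\.proto/hdds.proto/g' \
        -e 's/Hdsl/Hdds/g' \
        -e 's/hadoop-hdsl/hadoop-hdds/g' \
        -e 's/HDSL_/HDDS_/g'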

+ 1 - 1
hadoop-assemblies/src/main/resources/assemblies/hadoop-src.xml

@@ -51,7 +51,7 @@
         <exclude>**/file:/**</exclude>
         <exclude>**/SecurityAuth.audit*</exclude>
         <exclude>hadoop-ozone/**</exclude>
-        <exclude>hadoop-hdsl/**</exclude>
+        <exclude>hadoop-hdds/**</exclude>
         <exclude>hadoop-cblock/**</exclude>
       </excludes>
     </fileSet>

+ 4 - 4
hadoop-cblock/server/pom.xml

@@ -37,7 +37,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdsl-server-framework</artifactId>
+      <artifactId>hadoop-hdds-server-framework</artifactId>
     </dependency>
 
     <dependency>
@@ -47,12 +47,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdsl-common</artifactId>
+      <artifactId>hadoop-hdds-common</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdsl-client</artifactId>
+      <artifactId>hadoop-hdds-client</artifactId>
     </dependency>
 
     <dependency>
@@ -132,7 +132,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   ${basedir}/../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto/
                 </param>
                 <param>
-                  ${basedir}/../../hadoop-hdsl/common/src/main/proto/
+                  ${basedir}/../../hadoop-hdds/common/src/main/proto/
                 </param>
                 <param>${basedir}/src/main/proto</param>
               </imports>

+ 7 - 7
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CBlockManager.java

@@ -37,24 +37,24 @@ import org.apache.hadoop.cblock.protocolPB
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.client.ContainerOperationClient;
-import org.apache.hadoop.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.cblock.storage.StorageManager;
 import org.apache.hadoop.cblock.util.KeyUtil;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.scm.protocolPB
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.protocolPB
     .StorageContainerLocationProtocolClientSideTranslatorPB;
-import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolPB;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.utils.LevelDBStore;
 
 import static org.apache.hadoop.cblock.CblockUtils.getCblockServerRpcAddr;
 import static org.apache.hadoop.cblock.CblockUtils.getCblockServiceRpcAddr;
-import static org.apache.hadoop.hdsl.server.ServerUtils
+import static org.apache.hadoop.hdds.server.ServerUtils
     .updateRPCListenAddress;
 import org.iq80.leveldb.DBIterator;
 import org.slf4j.Logger;

+ 2 - 2
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/CblockUtils.java

@@ -36,8 +36,8 @@ import static org.apache.hadoop.cblock.CBlockConfigKeys
     .DFS_CBLOCK_SERVICERPC_HOSTNAME_DEFAULT;
 import static org.apache.hadoop.cblock.CBlockConfigKeys
     .DFS_CBLOCK_SERVICERPC_PORT_DEFAULT;
-import static org.apache.hadoop.hdsl.HdslUtils.getHostNameFromConfigKeys;
-import static org.apache.hadoop.hdsl.HdslUtils.getPortNumberFromConfigKeys;
+import static org.apache.hadoop.hdds.HddsUtils.getHostNameFromConfigKeys;
+import static org.apache.hadoop.hdds.HddsUtils.getPortNumberFromConfigKeys;
 
 /**
  * Generic stateless utility functions for CBlock components.

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/client/CBlockVolumeClient.java

@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.cblock.CBlockConfigKeys;
 import org.apache.hadoop.cblock.meta.VolumeInfo;
 import org.apache.hadoop.cblock.protocolPB.CBlockServiceProtocolPB;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;

+ 3 - 3
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/BlockWriterTask.java

@@ -21,9 +21,9 @@ import com.google.common.base.Preconditions;
 import com.google.common.primitives.Longs;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.AsyncBlockWriter;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.utils.LevelDBStore;
 

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockClientProtocolClientSideTranslatorPB.java

@@ -39,7 +39,7 @@ import org.apache.hadoop.cblock.protocolPB.CBlockClientServerProtocolPB;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 import java.io.Closeable;
 import java.io.IOException;

+ 2 - 2
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockIStorageImpl.java

@@ -23,8 +23,8 @@ import org.apache.hadoop.cblock.jscsiHelper.cache.CacheModule;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.jscsi.target.storage.IStorageModule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 2 - 2
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/CBlockTargetServer.java

@@ -20,8 +20,8 @@ package org.apache.hadoop.cblock.jscsiHelper;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.cblock.proto.MountVolumeResponse;
 import org.apache.hadoop.cblock.util.KeyUtil;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
 import org.jscsi.target.Configuration;
 import org.jscsi.target.Target;
 import org.jscsi.target.TargetServer;

+ 2 - 2
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/ContainerCacheFlusher.java

@@ -26,8 +26,8 @@ import org.apache.hadoop.cblock.jscsiHelper.cache.impl.AsyncBlockWriter;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.DiskBlock;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.utils.LevelDBStore;
 import org.iq80.leveldb.Options;

+ 10 - 10
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/SCSITargetDaemon.java

@@ -31,9 +31,9 @@ import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.scm.client.ContainerOperationClient;
+import org.apache.hadoop.hdds.scm.client.ContainerOperationClient;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.jscsi.target.Configuration;
 
@@ -47,14 +47,14 @@ import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_PORT_DE
 import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_PORT_KEY;
 import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_SERVER_ADDRESS_DEFAULT;
 import static org.apache.hadoop.cblock.CBlockConfigKeys.DFS_CBLOCK_JSCSI_SERVER_ADDRESS_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT;
-import static org.apache.hadoop.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_BIND_HOST_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_CLIENT_PORT_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_ADDRESS_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DATANODE_PORT_KEY;
 
 /**
  * This class runs the target server process.

+ 4 - 4
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/AsyncBlockWriter.java

@@ -22,10 +22,10 @@ import com.google.common.primitives.Longs;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.utils.LevelDBStore;
 import org.slf4j.Logger;

+ 2 - 2
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/CBlockLocalCache.java

@@ -24,8 +24,8 @@ import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher;
 import org.apache.hadoop.cblock.jscsiHelper.cache.CacheModule;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics;
 import org.apache.hadoop.utils.LevelDBStore;
 import org.slf4j.Logger;

+ 4 - 4
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/jscsiHelper/cache/impl/SyncBlockReader.java

@@ -22,10 +22,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.concurrent.HadoopThreadPoolExecutor;
 import org.apache.hadoop.utils.LevelDBStore;

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/kubernetes/DynamicProvisioner.java

@@ -38,7 +38,7 @@ import org.apache.hadoop.cblock.CblockUtils;
 import org.apache.hadoop.cblock.exception.CBlockException;
 import org.apache.hadoop.cblock.proto.MountVolumeResponse;
 import org.apache.hadoop.cblock.storage.StorageManager;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.ratis.shaded.com.google.common.annotations.VisibleForTesting;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/ContainerDescriptor.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.meta;
 
 import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 /**
  *

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/meta/VolumeDescriptor.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.meta;
 
 import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/proto/MountVolumeResponse.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.cblock.proto;
 
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 import java.util.HashMap;
 import java.util.List;

+ 1 - 1
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/protocolPB/CBlockClientServerProtocolServerSideTranslatorPB.java

@@ -25,7 +25,7 @@ import org.apache.hadoop.cblock.proto.MountVolumeResponse;
 import org.apache.hadoop.cblock.protocol.proto.CBlockClientServerProtocolProtos;
 import org.apache.hadoop.cblock.protocol.proto.CBlockServiceProtocolProtos;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 import java.io.IOException;
 import java.util.HashMap;

+ 6 - 6
hadoop-cblock/server/src/main/java/org/apache/hadoop/cblock/storage/StorageManager.java

@@ -25,10 +25,10 @@ import org.apache.hadoop.cblock.meta.VolumeDescriptor;
 import org.apache.hadoop.cblock.meta.VolumeInfo;
 import org.apache.hadoop.cblock.proto.MountVolumeResponse;
 import org.apache.hadoop.cblock.util.KeyUtil;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.client.ScmClient;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -187,8 +187,8 @@ public class StorageManager {
       ContainerDescriptor container = null;
       try {
         Pipeline pipeline = storageClient.createContainer(
-            HdslProtos.ReplicationType.STAND_ALONE,
-            HdslProtos.ReplicationFactor.ONE,
+            HddsProtos.ReplicationType.STAND_ALONE,
+            HddsProtos.ReplicationFactor.ONE,
             KeyUtil.getContainerName(volume.getUserName(),
                 volume.getVolumeName(), containerIdx), cblockId);
 

+ 2 - 2
hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto

@@ -27,7 +27,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 package hadoop.cblock;
 
-import "hdsl.proto";
+import "hdds.proto";
 import "CBlockServiceProtocol.proto";
 /**
 * This message is sent from CBlock client side to CBlock server to
@@ -69,7 +69,7 @@ message ContainerIDProto {
     required string containerID = 1;
     required uint64 index = 2;
     // making pipeline optional to be compatible with exisiting tests
-    optional hadoop.hdsl.Pipeline pipeline = 3;
+    optional hadoop.hdds.Pipeline pipeline = 3;
 }
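
The renamed import resolves only because hdds.proto now sits on the protoc import path; the hadoop-cblock/server/pom.xml hunk above adds hadoop-hdds/common/src/main/proto/ for exactly that reason. A hypothetical standalone check (the real build drives protoc through Maven with the import paths listed in the pom):

    # Assumed local invocation; mirrors the import paths from the pom.xml hunk.
    mkdir -p /tmp/generated
    protoc -I hadoop-hdds/common/src/main/proto \
           -I hadoop-hdfs-project/hadoop-hdfs/src/main/proto \
           -I hadoop-cblock/server/src/main/proto \
           --java_out=/tmp/generated \
           hadoop-cblock/server/src/main/proto/CBlockClientServerProtocol.proto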
 
 

+ 6 - 6
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestBufferManager.java

@@ -23,13 +23,13 @@ import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics;
 import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;

+ 10 - 10
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockReadWrite.java

@@ -24,21 +24,21 @@ import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics;
 import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.PipelineChannel;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.protocolPB
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.PipelineChannel;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB
     .StorageContainerLocationProtocolClientSideTranslatorPB;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;

+ 2 - 2
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServer.java

@@ -19,8 +19,8 @@ package org.apache.hadoop.cblock;
 
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.cblock.meta.VolumeInfo;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.scm.client.ScmClient;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.cblock.util.MockStorageClient;
 import org.junit.After;
 import org.junit.Before;

+ 2 - 2
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestCBlockServerPersistence.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.cblock;
 
 import org.apache.hadoop.cblock.meta.VolumeDescriptor;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
-import org.apache.hadoop.scm.client.ScmClient;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.cblock.util.MockStorageClient;
 import org.junit.Test;

+ 6 - 6
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/TestLocalBlockCache.java

@@ -25,16 +25,16 @@ import org.apache.hadoop.cblock.jscsiHelper.CBlockTargetMetrics;
 import org.apache.hadoop.cblock.jscsiHelper.ContainerCacheFlusher;
 import org.apache.hadoop.cblock.jscsiHelper.cache.LogicalBlock;
 import org.apache.hadoop.cblock.jscsiHelper.cache.impl.CBlockLocalCache;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ozone.MiniOzoneClassicCluster;
 import org.apache.hadoop.ozone.MiniOzoneCluster;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.junit.AfterClass;

+ 1 - 1
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/kubernetes/TestDynamicProvisioner.java

@@ -29,7 +29,7 @@ import org.junit.Test;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 
 /**
  * Test the resource generation of Dynamic Provisioner.

+ 1 - 1
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/ContainerLookUpService.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.cblock.util;
 
 import org.apache.hadoop.cblock.meta.ContainerDescriptor;
 import org.apache.hadoop.ozone.container.ContainerTestHelper;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;

+ 12 - 12
hadoop-cblock/server/src/test/java/org/apache/hadoop/cblock/util/MockStorageClient.java

@@ -18,12 +18,12 @@
 package org.apache.hadoop.cblock.util;
 
 import org.apache.hadoop.cblock.meta.ContainerDescriptor;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.client.ScmClient;
-import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -88,7 +88,7 @@ public class MockStorageClient implements ScmClient {
     ContainerInfo container = new ContainerInfo.Builder()
         .setContainerName(containerDescriptor.getContainerID())
         .setPipeline(containerDescriptor.getPipeline())
-        .setState(HdslProtos.LifeCycleState.ALLOCATED)
+        .setState(HddsProtos.LifeCycleState.ALLOCATED)
         .build();
     containerList.add(container);
     return containerList;
@@ -134,8 +134,8 @@ public class MockStorageClient implements ScmClient {
   }
 
   @Override
-  public Pipeline createContainer(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor replicationFactor, String containerId,
+  public Pipeline createContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor replicationFactor, String containerId,
       String owner) throws IOException {
     int contId = currentContainerId.getAndIncrement();
     ContainerLookUpService.addContainer(Long.toString(contId));
@@ -153,8 +153,8 @@ public class MockStorageClient implements ScmClient {
    * @throws IOException
    */
   @Override
-  public HdslProtos.NodePool queryNode(EnumSet<HdslProtos.NodeState>
-      nodeStatuses, HdslProtos.QueryScope queryScope, String poolName)
+  public HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState>
+      nodeStatuses, HddsProtos.QueryScope queryScope, String poolName)
       throws IOException {
     return null;
   }
@@ -168,8 +168,8 @@ public class MockStorageClient implements ScmClient {
    * @throws IOException
    */
   @Override
-  public Pipeline createReplicationPipeline(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool)
+  public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
       throws IOException {
     return null;
   }

+ 1 - 1
hadoop-cblock/tools/src/main/java/org/apache/hadoop/cblock/cli/CBlockCli.java

@@ -32,7 +32,7 @@ import org.apache.hadoop.cblock.protocolPB.CBlockServiceProtocolPB;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 

+ 1 - 1
hadoop-cblock/tools/src/test/org/apache/hadoop/cblock/TestCBlockCLI.java

@@ -21,7 +21,7 @@ import org.apache.hadoop.cblock.cli.CBlockCli;
 import org.apache.hadoop.cblock.meta.VolumeDescriptor;
 import org.apache.hadoop.cblock.util.MockStorageClient;
 import org.apache.hadoop.conf.OzoneConfiguration;
-import org.apache.hadoop.scm.client.ScmClient;
+import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.AfterClass;

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

@@ -596,8 +596,8 @@ function hadoop_bootstrap
   YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
   MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
   MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-  HDSL_DIR=${HDSL_DIR:-"share/hadoop/hdsl"}
-  HDSL_LIB_JARS_DIR=${HDSL_LIB_JARS_DIR:-"share/hadoop/hdsl/lib"}
+  HDDS_DIR=${HDDS_DIR:-"share/hadoop/hdds"}
+  HDDS_LIB_JARS_DIR=${HDDS_LIB_JARS_DIR:-"share/hadoop/hdds/lib"}
   OZONE_DIR=${OZONE_DIR:-"share/hadoop/ozone"}
   OZONE_LIB_JARS_DIR=${OZONE_LIB_JARS_DIR:-"share/hadoop/ozone/lib"}
   CBLOCK_DIR=${CBLOCK_DIR:-"share/hadoop/cblock"}
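
The two renamed bootstrap variables follow the same pattern as the YARN and MapReduce entries around them: hadoop_bootstrap only defines the share/ subdirectories, and later classpath setup reads them. A minimal sketch of the assumed downstream usage (hadoop_add_classpath is an existing helper in this script; the actual call sites are outside this hunk):

    # Assumed usage, mirroring how the other *_DIR variables are consumed.
    hadoop_add_classpath "${HADOOP_HOME}/${HDDS_DIR}"'/*'
    hadoop_add_classpath "${HADOOP_HOME}/${HDDS_LIB_JARS_DIR}"'/*'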

+ 5 - 5
hadoop-dist/pom.xml

@@ -219,7 +219,7 @@
     </profile>
 
     <profile>
-      <id>hdsl</id>
+      <id>hdds</id>
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
@@ -231,11 +231,11 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdsl-server-scm</artifactId>
+          <artifactId>hadoop-hdds-server-scm</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdsl-tools</artifactId>
+          <artifactId>hadoop-hdds-tools</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -243,7 +243,7 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdsl-container-service</artifactId>
+          <artifactId>hadoop-hdds-container-service</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -251,7 +251,7 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdsl-tools</artifactId>
+          <artifactId>hadoop-hdds-tools</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
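
Note that the renamed profile keeps activeByDefault set to false, so the HDDS artifacts are bundled into the distribution only when the profile is selected explicitly. An assumed invocation, using standard Maven profile syntax:

    # Assumed command line; enables the renamed profile for the dist build.
    mvn clean package -Phdds -DskipTests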

+ 1 - 1
hadoop-dist/src/main/compose/cblock/docker-config

@@ -27,7 +27,7 @@ OZONE-SITE.XML_ozone.scm.client.address=scm
 OZONE-SITE.XML_dfs.cblock.jscsi.cblock.server.address=cblock
 OZONE-SITE.XML_dfs.cblock.scm.ipaddress=scm
 OZONE-SITE.XML_dfs.cblock.service.leveldb.path=/tmp
-HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HddsDatanodeService
 HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
 HDFS-SITE.XML_rpc.metrics.quantile.enable=true

+ 1 - 1
hadoop-dist/src/main/compose/ozone/docker-config

@@ -27,7 +27,7 @@ HDFS-SITE.XML_dfs.namenode.rpc-address=namenode:9000
 HDFS-SITE.XML_dfs.namenode.name.dir=/data/namenode
 HDFS-SITE.XML_rpc.metrics.quantile.enable=true
 HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
-HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HdslDatanodeService
+HDFS-SITE.XML_dfs.datanode.plugins=org.apache.hadoop.ozone.web.ObjectStoreRestPlugin,org.apache.hadoop.ozone.HddsDatanodeService
 LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
 LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
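
In both compose files the rename touches only the datanode plugin class; the KEY.XML_name=value entries themselves are expanded into the corresponding configuration files by startup scripts inside the docker image, which live outside this commit. A hypothetical smoke test once the cluster is up (the service name and config path are assumptions):

    # Hypothetical check that the renamed plugin class reached the generated
    # hdfs-site.xml; 'datanode' and the path below are assumptions.
    docker-compose exec datanode grep -A1 dfs.datanode.plugins \
        /opt/hadoop/etc/hadoop/hdfs-site.xml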

+ 6 - 6
hadoop-hdsl/client/pom.xml → hadoop-hdds/client/pom.xml

@@ -19,24 +19,24 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-hdsl</artifactId>
+    <artifactId>hadoop-hdds</artifactId>
     <version>3.2.0-SNAPSHOT</version>
   </parent>
-  <artifactId>hadoop-hdsl-client</artifactId>
+  <artifactId>hadoop-hdds-client</artifactId>
   <version>3.2.0-SNAPSHOT</version>
-  <description>Apache Hadoop HDSL Client libraries</description>
-  <name>Apache Hadoop HDSL Client</name>
+  <description>Apache Hadoop Distributed Data Store Client libraries</description>
+  <name>Apache HDDS Client</name>
   <packaging>jar</packaging>
 
   <properties>
-    <hadoop.component>hdsl</hadoop.component>
+    <hadoop.component>hdds</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
   </properties>
 
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdsl-common</artifactId>
+      <artifactId>hadoop-hdds-common</artifactId>
       <scope>provided</scope>
     </dependency>
 

+ 10 - 10
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClient.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClient.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -28,19 +28,19 @@ import io.netty.channel.socket.nio.NioSocketChannel;
 import io.netty.handler.logging.LogLevel;
 import io.netty.handler.logging.LoggingHandler;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.ozone.client.OzoneClientUtils;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
-import java.util.List;
 import java.util.concurrent.Semaphore;
 
 /**
@@ -69,7 +69,7 @@ public class XceiverClient extends XceiverClientSpi {
     this.pipeline = pipeline;
     this.config = config;
     this.semaphore =
-        new Semaphore(OzoneClientUtils.getMaxOutstandingRequests(config));
+        new Semaphore(HddsClientUtils.getMaxOutstandingRequests(config));
   }
 
   @Override
@@ -186,7 +186,7 @@ public class XceiverClient extends XceiverClientSpi {
    * @return - Stand Alone as the type.
    */
   @Override
-  public HdslProtos.ReplicationType getPipelineType() {
-    return HdslProtos.ReplicationType.STAND_ALONE;
+  public HddsProtos.ReplicationType getPipelineType() {
+    return HddsProtos.ReplicationType.STAND_ALONE;
   }
 }

+ 4 - 6
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientHandler.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientHandler.java

@@ -15,19 +15,17 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import com.google.common.base.Preconditions;
-
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ContainerCommandResponseProto;
-
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 3 - 3
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientInitializer.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientInitializer.java

@@ -15,7 +15,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import io.netty.channel.ChannelInitializer;
 import io.netty.channel.ChannelPipeline;
@@ -24,8 +24,8 @@ import io.netty.handler.codec.protobuf.ProtobufDecoder;
 import io.netty.handler.codec.protobuf.ProtobufEncoder;
 import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
 import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
 
 import java.util.concurrent.Semaphore;
 

+ 21 - 22
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientManager.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientManager.java

@@ -16,33 +16,32 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.Callable;
+package org.apache.hadoop.hdds.scm;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 
-import static org.apache.hadoop.scm.ScmConfigKeys
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
+    .SCM_CONTAINER_CLIENT_MAX_SIZE_KEY;
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
     .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_DEFAULT;
-import static org.apache.hadoop.scm.ScmConfigKeys
+import static org.apache.hadoop.hdds.scm.ScmConfigKeys
     .SCM_CONTAINER_CLIENT_STALE_THRESHOLD_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys
-    .SCM_CONTAINER_CLIENT_MAX_SIZE_KEY;
-import static org.apache.hadoop.scm.ScmConfigKeys
-    .SCM_CONTAINER_CLIENT_MAX_SIZE_DEFAULT;
-import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos
     .ReplicationType.RATIS;
 
 /**
@@ -186,24 +185,24 @@ public class XceiverClientManager implements Closeable {
    * Returns hard coded 3 as replication factor.
    * @return 3
    */
-  public  HdslProtos.ReplicationFactor getFactor() {
+  public  HddsProtos.ReplicationFactor getFactor() {
     if(isUseRatis()) {
-      return HdslProtos.ReplicationFactor.THREE;
+      return HddsProtos.ReplicationFactor.THREE;
     }
-    return HdslProtos.ReplicationFactor.ONE;
+    return HddsProtos.ReplicationFactor.ONE;
   }
 
   /**
    * Returns the default replication type.
    * @return Ratis or Standalone
    */
-  public HdslProtos.ReplicationType getType() {
+  public HddsProtos.ReplicationType getType() {
     // TODO : Fix me and make Ratis default before release.
     // TODO: Remove this as replication factor and type are pipeline properties
     if(isUseRatis()) {
-      return HdslProtos.ReplicationType.RATIS;
+      return HddsProtos.ReplicationType.RATIS;
     }
-    return HdslProtos.ReplicationType.STAND_ALONE;
+    return HddsProtos.ReplicationType.STAND_ALONE;
   }
 
   /**

+ 2 - 2
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientMetrics.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientMetrics.java

@@ -15,10 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
 import org.apache.hadoop.metrics2.MetricsSystem;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;

+ 12 - 10
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/XceiverClientRatis.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientRatis.java

@@ -16,17 +16,19 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerCommandResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.ozone.client.OzoneClientUtils;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ContainerCommandResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.ratis.RatisHelper;
 import org.apache.ratis.client.RaftClient;
 import org.apache.ratis.protocol.RaftClientReply;
@@ -60,7 +62,7 @@ public final class XceiverClientRatis extends XceiverClientSpi {
         ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_KEY,
         ScmConfigKeys.DFS_CONTAINER_RATIS_RPC_TYPE_DEFAULT);
     final int maxOutstandingRequests =
-        OzoneClientUtils.getMaxOutstandingRequests(ozoneConf);
+        HddsClientUtils.getMaxOutstandingRequests(ozoneConf);
     return new XceiverClientRatis(pipeline,
         SupportedRpcType.valueOfIgnoreCase(rpcType), maxOutstandingRequests);
   }
@@ -98,8 +100,8 @@ public final class XceiverClientRatis extends XceiverClientSpi {
    * @return - Ratis
    */
   @Override
-  public HdslProtos.ReplicationType getPipelineType() {
-    return HdslProtos.ReplicationType.RATIS;
+  public HddsProtos.ReplicationType getPipelineType() {
+    return HddsProtos.ReplicationType.RATIS;
   }
 
   private void reinitialize(List<DatanodeDetails> datanodes, RaftGroup group)

+ 23 - 19
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/ContainerOperationClient.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/ContainerOperationClient.java

@@ -15,20 +15,22 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm.client;
+package org.apache.hadoop.hdds.scm.client;
 
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ReadContainerResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.protocolPB
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocolPB
     .StorageContainerLocationProtocolClientSideTranslatorPB;
-import org.apache.hadoop.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,8 +39,10 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.UUID;
 
-import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.ALLOCATED;
-import static org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState.OPEN;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState
+    .ALLOCATED;
+import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState
+    .OPEN;
 
 /**
  * This class provides the client-facing APIs of container operations.
@@ -189,8 +193,8 @@ public class ContainerOperationClient implements ScmClient {
    * @inheritDoc
    */
   @Override
-  public Pipeline createContainer(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor,
+  public Pipeline createContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor,
       String containerId, String owner) throws IOException {
     XceiverClientSpi client = null;
     try {
@@ -229,8 +233,8 @@ public class ContainerOperationClient implements ScmClient {
    * @throws IOException
    */
   @Override
-  public HdslProtos.NodePool queryNode(EnumSet<HdslProtos.NodeState>
-      nodeStatuses, HdslProtos.QueryScope queryScope, String poolName)
+  public HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState>
+      nodeStatuses, HddsProtos.QueryScope queryScope, String poolName)
       throws IOException {
     return storageContainerLocationClient.queryNode(nodeStatuses, queryScope,
         poolName);
@@ -240,8 +244,8 @@ public class ContainerOperationClient implements ScmClient {
    * Creates a specified replication pipeline.
    */
   @Override
-  public Pipeline createReplicationPipeline(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool)
+  public Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
       throws IOException {
     return storageContainerLocationClient.createReplicationPipeline(type,
         factor, nodePool);

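A hedged sketch of the renamed queryNode API above; the HEALTHY and CLUSTER enum values are assumptions not shown in this hunk, and `client` stands for any wired-up ScmClient implementation such as ContainerOperationClient:

    import java.io.IOException;
    import java.util.EnumSet;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.client.ScmClient;

    final class QueryNodeSketch {
      // Returns the cluster-wide pool of healthy datanodes; the empty pool
      // name reflects the cluster-wide-only query noted later in this patch.
      static HddsProtos.NodePool healthyNodes(ScmClient client)
          throws IOException {
        return client.queryNode(EnumSet.of(HddsProtos.NodeState.HEALTHY),
            HddsProtos.QueryScope.CLUSTER, "");
      }
    }
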
+ 68 - 67
hadoop-hdsl/client/src/main/java/org/apache/hadoop/ozone/client/OzoneClientUtils.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java

@@ -16,30 +16,28 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.client;
-
-import java.text.ParseException;
-import java.time.Instant;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.concurrent.TimeUnit;
+package org.apache.hadoop.hdds.scm.client;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.scm.ScmConfigKeys;
-
-import com.google.common.base.Preconditions;
 import org.apache.http.client.config.RequestConfig;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.text.ParseException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.concurrent.TimeUnit;
+
 /**
  * Utility methods for Ozone and Container Clients.
  *
@@ -49,14 +47,14 @@ import org.slf4j.LoggerFactory;
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
-public final class OzoneClientUtils {
+public final class HddsClientUtils {
 
   private static final Logger LOG = LoggerFactory.getLogger(
-      OzoneClientUtils.class);
+      HddsClientUtils.class);
 
   private static final int NO_PORT = -1;
 
-  private OzoneClientUtils() {
+  private HddsClientUtils() {
   }
 
   /**
@@ -69,55 +67,28 @@ public final class OzoneClientUtils {
         return format.withZone(ZoneId.of(OzoneConsts.OZONE_TIME_ZONE));
       });
 
+
   /**
-   * Returns the cache value to be used for list calls.
-   * @param conf Configuration object
-   * @return list cache size
+   * Convert a time in milliseconds to the human-readable format Ozone uses.
+   * @return a human-readable string for the input time
    */
-  public static int getListCacheSize(Configuration conf) {
-    return conf.getInt(OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE,
-        OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT);
+  public static String formatDateTime(long millis) {
+    ZonedDateTime dateTime = ZonedDateTime.ofInstant(
+        Instant.ofEpochSecond(millis), DATE_FORMAT.get().getZone());
+    return DATE_FORMAT.get().format(dateTime);
   }
 
   /**
-   * @return a default instance of {@link CloseableHttpClient}.
+   * Convert a time in the Ozone date format back to milliseconds.
+   * @return time in milliseconds
    */
-  public static CloseableHttpClient newHttpClient() {
-    return OzoneClientUtils.newHttpClient(new OzoneConfiguration());
+  public static long formatDateTime(String date) throws ParseException {
+    Preconditions.checkNotNull(date, "Date string should not be null.");
+    return ZonedDateTime.parse(date, DATE_FORMAT.get())
+        .toInstant().getEpochSecond();
   }
 
-  /**
-   * Returns a {@link CloseableHttpClient} configured by given configuration.
-   * If conf is null, returns a default instance.
-   *
-   * @param conf configuration
-   * @return a {@link CloseableHttpClient} instance.
-   */
-  public static CloseableHttpClient newHttpClient(Configuration conf) {
-    long socketTimeout = OzoneConfigKeys
-        .OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT;
-    long connectionTimeout = OzoneConfigKeys
-        .OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT;
-    if (conf != null) {
-      socketTimeout = conf.getTimeDuration(
-          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT,
-          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT,
-          TimeUnit.MILLISECONDS);
-      connectionTimeout = conf.getTimeDuration(
-          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT,
-          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT,
-          TimeUnit.MILLISECONDS);
-    }
 
-    CloseableHttpClient client = HttpClients.custom()
-        .setDefaultRequestConfig(
-            RequestConfig.custom()
-                .setSocketTimeout(Math.toIntExact(socketTimeout))
-                .setConnectTimeout(Math.toIntExact(connectionTimeout))
-                .build())
-        .build();
-    return client;
-  }
 
   /**
    * verifies that bucket name / volume name is a valid DNS name.
@@ -199,23 +170,53 @@ public final class OzoneClientUtils {
   }
 
   /**
-   * Convert time in millisecond to a human readable format required in ozone.
-   * @return a human readable string for the input time
+   * Returns the cache value to be used for list calls.
+   * @param conf Configuration object
+   * @return list cache size
    */
-  public static String formatDateTime(long millis) {
-    ZonedDateTime dateTime = ZonedDateTime.ofInstant(
-        Instant.ofEpochSecond(millis), DATE_FORMAT.get().getZone());
-    return DATE_FORMAT.get().format(dateTime);
+  public static int getListCacheSize(Configuration conf) {
+    return conf.getInt(OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE,
+        OzoneConfigKeys.OZONE_CLIENT_LIST_CACHE_SIZE_DEFAULT);
   }
 
   /**
-   * Convert time in ozone date format to millisecond.
-   * @return time in milliseconds
+   * @return a default instance of {@link CloseableHttpClient}.
    */
-  public static long formatDateTime(String date) throws ParseException {
-    Preconditions.checkNotNull(date, "Date string should not be null.");
-    return ZonedDateTime.parse(date, DATE_FORMAT.get())
-        .toInstant().getEpochSecond();
+  public static CloseableHttpClient newHttpClient() {
+    return HddsClientUtils.newHttpClient(new Configuration());
+  }
+
+  /**
+   * Returns a {@link CloseableHttpClient} configured by given configuration.
+   * If conf is null, returns a default instance.
+   *
+   * @param conf configuration
+   * @return a {@link CloseableHttpClient} instance.
+   */
+  public static CloseableHttpClient newHttpClient(Configuration conf) {
+    long socketTimeout = OzoneConfigKeys
+        .OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT;
+    long connectionTimeout = OzoneConfigKeys
+        .OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT;
+    if (conf != null) {
+      socketTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_SOCKET_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+      connectionTimeout = conf.getTimeDuration(
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT,
+          OzoneConfigKeys.OZONE_CLIENT_CONNECTION_TIMEOUT_DEFAULT,
+          TimeUnit.MILLISECONDS);
+    }
+
+    CloseableHttpClient client = HttpClients.custom()
+        .setDefaultRequestConfig(
+            RequestConfig.custom()
+                .setSocketTimeout(Math.toIntExact(socketTimeout))
+                .setConnectTimeout(Math.toIntExact(connectionTimeout))
+                .build())
+        .build();
+    return client;
   }
 
   /**

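A small round-trip sketch (not in the patch) for the two formatDateTime overloads moved above; it feeds epoch seconds because, as the hunk shows, the implementation uses Instant.ofEpochSecond and getEpochSecond internally:

    import java.text.ParseException;
    import org.apache.hadoop.hdds.scm.client.HddsClientUtils;

    public final class DateRoundTripSketch {
      public static void main(String[] args) throws ParseException {
        // Format an epoch-second timestamp, then parse it back.
        long epochSeconds = System.currentTimeMillis() / 1000L;
        String text = HddsClientUtils.formatDateTime(epochSeconds);
        long parsed = HddsClientUtils.formatDateTime(text);
        System.out.println(text + " -> " + parsed);
      }
    }
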
+ 1 - 1
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/client/package-info.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.client;
+package org.apache.hadoop.hdds.scm.client;
 
 /**
  * Client facing classes for the container operations.

+ 1 - 1
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/package-info.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 /**
  * Classes for different types of container service clients.

+ 9 - 9
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkInputStream.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkInputStream.java

@@ -16,7 +16,15 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.storage;
+package org.apache.hadoop.hdds.scm.storage;
+
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadChunkResponseProto;
 
 import java.io.EOFException;
 import java.io.IOException;
@@ -25,14 +33,6 @@ import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
 
-import com.google.protobuf.ByteString;
-
-import org.apache.hadoop.fs.Seekable;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ReadChunkResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.scm.XceiverClientSpi;
-import org.apache.hadoop.scm.XceiverClientManager;
-
 /**
  * An {@link InputStream} used by the REST service in combination with the
  * SCMClient to read the value of a key from a sequence

+ 11 - 11
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/ChunkOutputStream.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/ChunkOutputStream.java

@@ -16,24 +16,24 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.storage;
+package org.apache.hadoop.hdds.scm.storage;
 
-import static org.apache.hadoop.scm.storage.ContainerProtocolCalls.putKey;
-import static org.apache.hadoop.scm.storage.ContainerProtocolCalls.writeChunk;
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hdds.scm.XceiverClientManager;
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
 
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.UUID;
 
-import com.google.protobuf.ByteString;
-
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.KeyData;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue;
-import org.apache.hadoop.scm.XceiverClientManager;
-import org.apache.hadoop.scm.XceiverClientSpi;
+import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls.putKey;
+import static org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls
+    .writeChunk;
 
 /**
  * An {@link OutputStream} used by the REST service in combination with the

+ 1 - 1
hadoop-hdsl/client/src/main/java/org/apache/hadoop/scm/storage/package-info.java → hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.storage;
+package org.apache.hadoop.hdds.scm.storage;
 
 /**
  * Low level IO streams to upload/download chunks from container service.

+ 1 - 1
hadoop-hdsl/common/dev-support/findbugsExcludeFile.xml → hadoop-hdds/common/dev-support/findbugsExcludeFile.xml

@@ -16,6 +16,6 @@
 -->
 <FindBugsFilter>
   <Match>
-    <Package name="org.apache.hadoop.hdsl.protocol.proto"/>
+    <Package name="org.apache.hadoop.hdds.protocol.proto"/>
   </Match>
 </FindBugsFilter>

+ 6 - 7
hadoop-hdsl/common/pom.xml → hadoop-hdds/common/pom.xml

@@ -19,22 +19,21 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-hdsl</artifactId>
+    <artifactId>hadoop-hdds</artifactId>
     <version>3.2.0-SNAPSHOT</version>
   </parent>
-  <artifactId>hadoop-hdsl-common</artifactId>
+  <artifactId>hadoop-hdds-common</artifactId>
   <version>3.2.0-SNAPSHOT</version>
-  <description>Apache Hadoop HDSL Common utilities</description>
-  <name>Apache Hadoop HDSL Common</name>
+  <description>Apache Hadoop Distributed Data Store Common</description>
+  <name>Apache HDDS Common</name>
   <packaging>jar</packaging>
 
   <properties>
-    <hadoop.component>hdsl</hadoop.component>
+    <hadoop.component>hdds</hadoop.component>
     <is.hadoop.component>true</is.hadoop.component>
   </properties>
 
   <dependencies>
-
     <dependency>
       <groupId>org.fusesource.leveldbjni</groupId>
       <artifactId>leveldbjni-all</artifactId>
@@ -109,7 +108,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                 <includes>
                   <include>StorageContainerLocationProtocol.proto</include>
                   <include>DatanodeContainerProtocol.proto</include>
-                  <include>hdsl.proto</include>
+                  <include>hdds.proto</include>
                   <include>ScmBlockLocationProtocol.proto</include>
                 </includes>
               </source>

+ 9 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java

@@ -0,0 +1,9 @@
+package org.apache.hadoop.hdds;
+
+/**
+ * Configuration key constants for HDDS.
+ */
+public class HddsConfigKeys {
+  private HddsConfigKeys() {
+  }
+}

+ 17 - 18
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/HdslUtils.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java

@@ -16,35 +16,34 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hdsl;
+package org.apache.hadoop.hdds;
 
-import java.net.InetSocketAddress;
-
-import java.nio.file.Paths;
-import java.util.Collection;
-import java.util.HashSet;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.net.HostAndPort;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.hdsl.conf.OzoneConfiguration;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.scm.ScmConfigKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.InetSocketAddress;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.HashSet;
 
-import com.google.common.base.Optional;
-import com.google.common.base.Strings;
-import com.google.common.net.HostAndPort;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED_DEFAULT;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
- * HDSL specific stateless utility functions.
+ * HDDS specific stateless utility functions.
  */
-public class HdslUtils {
+public class HddsUtils {
 
 
-  private static final Logger LOG = LoggerFactory.getLogger(HdslUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(HddsUtils.class);
 
   /**
    * The service ID of the solitary Ozone SCM service.
@@ -55,7 +54,7 @@ public class HdslUtils {
 
   private static final int NO_PORT = -1;
 
-  private HdslUtils() {
+  private HddsUtils() {
   }
 
   /**
@@ -233,7 +232,7 @@ public class HdslUtils {
     return addresses;
   }
 
-  public static boolean isHdslEnabled(Configuration conf) {
+  public static boolean isHddsEnabled(Configuration conf) {
     String securityEnabled =
         conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
             "simple");

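A sketch of the renamed feature check; OzoneConfigKeys.OZONE_ENABLED is the constant statically imported in the hunk above, and the "simple" authentication default is assumed to be in effect:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.HddsUtils;
    import org.apache.hadoop.ozone.OzoneConfigKeys;

    public final class HddsEnabledSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // With simple authentication (the default), the flag alone decides.
        conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
        System.out.println("hdds enabled: " + HddsUtils.isHddsEnabled(conf));
      }
    }
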
+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/OzoneQuota.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/OzoneQuota.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.client;
+package org.apache.hadoop.hdds.client;
 
 import org.apache.hadoop.ozone.OzoneConsts;
 

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationFactor.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationFactor.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.client;
+package org.apache.hadoop.hdds.client;
 
 /**
  * The replication factor to be used while writing key into ozone.

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/client/ReplicationType.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ReplicationType.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.client;
+package org.apache.hadoop.hdds.client;
 
 /**
  * The replication type to be used while writing key into ozone.

+ 23 - 0
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/package-info.java

@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.client;
+
+/**
+ * Base property types for HDDS containers and replications.
+ */

+ 7 - 6
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/OzoneConfiguration.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/OzoneConfiguration.java

@@ -16,11 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hdsl.conf;
+package org.apache.hadoop.hdds.conf;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
 
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Unmarshaller;
@@ -28,8 +28,9 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Configuration for ozone.

+ 1 - 1
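A one-line sketch confirming the class's new home under hdds.conf; the no-arg constructor is the one already used by the removed newHttpClient() earlier in this patch:

    import org.apache.hadoop.hdds.conf.OzoneConfiguration;
    import org.apache.hadoop.ozone.OzoneConfigKeys;

    public final class OzoneConfSketch {
      public static void main(String[] args) {
        // OzoneConfiguration extends Configuration, so inherited getters work.
        OzoneConfiguration conf = new OzoneConfiguration();
        System.out.println(conf.get(OzoneConfigKeys.OZONE_ENABLED, "false"));
      }
    }
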
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/conf/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/conf/package-info.java

@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdsl.conf;
+package org.apache.hadoop.hdds.conf;

+ 2 - 2
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/package-info.java

@@ -16,8 +16,8 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hdsl;
+package org.apache.hadoop.hdds;
 
 /**
- * Generic HDSL specific configurator and helper classes.
+ * Generic HDDS specific configurator and helper classes.
  */

+ 7 - 7
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/DatanodeDetails.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/DatanodeDetails.java

@@ -16,12 +16,12 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hdsl.protocol;
+package org.apache.hadoop.hdds.protocol;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 
 import java.util.UUID;
 
@@ -222,7 +222,7 @@ public final class DatanodeDetails implements Comparable<DatanodeDetails> {
    * @return DatanodeDetails
    */
   public static DatanodeDetails getFromProtoBuf(
-      HdslProtos.DatanodeDetailsProto datanodeDetailsProto) {
+      HddsProtos.DatanodeDetailsProto datanodeDetailsProto) {
     DatanodeDetails.Builder builder = newBuilder();
     builder.setUuid(datanodeDetailsProto.getUuid());
     if (datanodeDetailsProto.hasIpAddress()) {
@@ -251,11 +251,11 @@ public final class DatanodeDetails implements Comparable<DatanodeDetails> {
 
   /**
    * Returns a DatanodeDetails protobuf message from a datanode ID.
-   * @return Hdsl.DatanodeDetailsProto
+   * @return HddsProtos.DatanodeDetailsProto
    */
-  public HdslProtos.DatanodeDetailsProto getProtoBufMessage() {
-    HdslProtos.DatanodeDetailsProto.Builder builder =
-        HdslProtos.DatanodeDetailsProto.newBuilder()
+  public HddsProtos.DatanodeDetailsProto getProtoBufMessage() {
+    HddsProtos.DatanodeDetailsProto.Builder builder =
+        HddsProtos.DatanodeDetailsProto.newBuilder()
             .setUuid(getUuidString());
     if (ipAddress != null) {
       builder.setIpAddress(ipAddress);

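A hedged protobuf round-trip for the class above; newBuilder(), setUuid(String), getProtoBufMessage(), getFromProtoBuf(...) and getUuidString() all appear in the hunk, while setIpAddress(...) and build() are assumed from the same builder pattern:

    import java.util.UUID;
    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

    public final class DatanodeDetailsSketch {
      public static void main(String[] args) {
        DatanodeDetails dn = DatanodeDetails.newBuilder()
            .setUuid(UUID.randomUUID().toString())
            .setIpAddress("127.0.0.1")        // assumed builder method
            .build();                         // assumed terminal call
        HddsProtos.DatanodeDetailsProto proto = dn.getProtoBufMessage();
        DatanodeDetails copy = DatanodeDetails.getFromProtoBuf(proto);
        System.out.println(copy.getUuidString());
      }
    }
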
+ 2 - 2
hadoop-hdsl/common/src/main/java/org/apache/hadoop/hdsl/protocol/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/protocol/package-info.java

@@ -17,6 +17,6 @@
  */
 
 /**
- * This package contains HDSL protocol related classes.
+ * This package contains HDDS protocol related classes.
  */
-package org.apache.hadoop.hdsl.protocol;
+package org.apache.hadoop.hdds.protocol;

+ 13 - 13
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmConfigKeys.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java

@@ -15,7 +15,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -121,18 +121,18 @@ public final class ScmConfigKeys {
   public static final int OZONE_SCM_HTTP_BIND_PORT_DEFAULT = 9876;
   public static final int OZONE_SCM_HTTPS_BIND_PORT_DEFAULT = 9877;
 
-  public static final String HDSL_REST_HTTP_ADDRESS_KEY =
-      "hdsl.rest.http-address";
-  public static final String HDSL_REST_HTTP_ADDRESS_DEFAULT = "0.0.0.0:9880";
-  public static final String HDSL_REST_CSRF_ENABLED_KEY =
-      "hdsl.rest.rest-csrf.enabled";
-  public static final boolean HDSL_REST_CSRF_ENABLED_DEFAULT = false;
-  public static final String HDSL_REST_NETTY_HIGH_WATERMARK =
-      "hdsl.rest.netty.high.watermark";
-  public static final int HDSL_REST_NETTY_HIGH_WATERMARK_DEFAULT = 65536;
-  public static final int HDSL_REST_NETTY_LOW_WATERMARK_DEFAULT = 32768;
-  public static final String HDSL_REST_NETTY_LOW_WATERMARK =
-      "hdsl.rest.netty.low.watermark";
+  public static final String HDDS_REST_HTTP_ADDRESS_KEY =
+      "hdds.rest.http-address";
+  public static final String HDDS_REST_HTTP_ADDRESS_DEFAULT = "0.0.0.0:9880";
+  public static final String HDDS_REST_CSRF_ENABLED_KEY =
+      "hdds.rest.rest-csrf.enabled";
+  public static final boolean HDDS_REST_CSRF_ENABLED_DEFAULT = false;
+  public static final String HDDS_REST_NETTY_HIGH_WATERMARK =
+      "hdds.rest.netty.high.watermark";
+  public static final int HDDS_REST_NETTY_HIGH_WATERMARK_DEFAULT = 65536;
+  public static final int HDDS_REST_NETTY_LOW_WATERMARK_DEFAULT = 32768;
+  public static final String HDDS_REST_NETTY_LOW_WATERMARK =
+      "hdds.rest.netty.low.watermark";
 
   public static final String OZONE_SCM_HANDLER_COUNT_KEY =
       "ozone.scm.handler.count.key";

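A sketch that resolves the renamed hdds.rest.* keys against their defaults; all constants are exactly the ones introduced in the hunk above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdds.scm.ScmConfigKeys;

    public final class RestKeysSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        String address = conf.get(ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_KEY,
            ScmConfigKeys.HDDS_REST_HTTP_ADDRESS_DEFAULT);
        boolean csrf = conf.getBoolean(ScmConfigKeys.HDDS_REST_CSRF_ENABLED_KEY,
            ScmConfigKeys.HDDS_REST_CSRF_ENABLED_DEFAULT);
        System.out.println(address + ", csrf=" + csrf);
      }
    }
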
+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/ScmInfo.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmInfo.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 /**
  * ScmInfo wraps the result returned from SCM#getScmInfo which

+ 7 - 7
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/XceiverClientSpi.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientSpi.java

@@ -16,16 +16,16 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -125,5 +125,5 @@ public abstract class XceiverClientSpi implements Closeable {
    *
    * @return - {Stand_Alone, Ratis or Chained}
    */
-  public abstract HdslProtos.ReplicationType getPipelineType();
+  public abstract HddsProtos.ReplicationType getPipelineType();
 }

+ 11 - 11
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/ScmClient.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java

@@ -15,13 +15,13 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm.client;
+package org.apache.hadoop.hdds.scm.client;
 
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ContainerData;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ContainerData;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 
 import java.io.IOException;
 import java.util.EnumSet;
@@ -111,8 +111,8 @@ public interface ScmClient {
    * @return Pipeline
    * @throws IOException - in case of error.
    */
-  Pipeline createContainer(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor replicationFactor, String containerId,
+  Pipeline createContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor replicationFactor, String containerId,
       String owner) throws IOException;
 
   /**
@@ -123,8 +123,8 @@ public interface ScmClient {
    * @return A set of nodes that meet the requested criteria.
    * @throws IOException
    */
-  HdslProtos.NodePool queryNode(EnumSet<HdslProtos.NodeState> nodeStatuses,
-      HdslProtos.QueryScope queryScope, String poolName) throws IOException;
+  HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState> nodeStatuses,
+      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
 
   /**
    * Creates a specified replication pipeline.
@@ -133,7 +133,7 @@ public interface ScmClient {
    * @param nodePool - Set of machines.
    * @throws IOException
    */
-  Pipeline createReplicationPipeline(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool)
+  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
       throws IOException;
 }

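A hedged sketch against the renamed interface above; STAND_ALONE and ONE are assumed enum values, and the container name and owner strings are illustrative:

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.client.ScmClient;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

    final class CreateContainerSketch {
      // Creates a single-replica standalone container via any ScmClient.
      static Pipeline create(ScmClient client) throws IOException {
        return client.createContainer(HddsProtos.ReplicationType.STAND_ALONE,
            HddsProtos.ReplicationFactor.ONE, "demo-container", "demo-owner");
      }
    }
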
+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/client/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.client;
+package org.apache.hadoop.hdds.scm.client;
 
 /**
  * This package contains classes for the client of the storage container

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/ContainerStates/ContainerID.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerID.java

@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.hadoop.ozone.scm.container.ContainerStates;
+package org.apache.hadoop.hdds.scm.container;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.math3.util.MathUtils;

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/AllocatedBlock.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/AllocatedBlock.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
 /**
  * Allocated block wraps the result returned from SCM#allocateBlock which

+ 13 - 13
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/ContainerInfo.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/ContainerInfo.java

@@ -16,13 +16,13 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.ozone.scm.container.ContainerStates.ContainerID;
+import org.apache.hadoop.hdds.scm.container.ContainerID;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.util.Time;
 
 import java.util.Comparator;
@@ -32,7 +32,7 @@ import java.util.Comparator;
  */
 public class ContainerInfo
     implements Comparator<ContainerInfo>, Comparable<ContainerInfo> {
-  private HdslProtos.LifeCycleState state;
+  private HddsProtos.LifeCycleState state;
   private Pipeline pipeline;
   // Bytes allocated by SCM for clients.
   private long allocatedBytes;
@@ -48,7 +48,7 @@ public class ContainerInfo
   ContainerInfo(
       long containerID,
       final String containerName,
-      HdslProtos.LifeCycleState state,
+      HddsProtos.LifeCycleState state,
       Pipeline pipeline,
       long allocatedBytes,
       long usedBytes,
@@ -73,7 +73,7 @@ public class ContainerInfo
   public ContainerInfo() {
   }
 
-  public static ContainerInfo fromProtobuf(HdslProtos.SCMContainerInfo info) {
+  public static ContainerInfo fromProtobuf(HddsProtos.SCMContainerInfo info) {
     ContainerInfo.Builder builder = new ContainerInfo.Builder();
     builder.setPipeline(Pipeline.getFromProtoBuf(info.getPipeline()));
     builder.setAllocatedBytes(info.getAllocatedBytes());
@@ -95,11 +95,11 @@ public class ContainerInfo
     return containerName;
   }
 
-  public HdslProtos.LifeCycleState getState() {
+  public HddsProtos.LifeCycleState getState() {
     return state;
   }
 
-  public void setState(HdslProtos.LifeCycleState state) {
+  public void setState(HddsProtos.LifeCycleState state) {
     this.state = state;
   }
 
@@ -156,9 +156,9 @@ public class ContainerInfo
     allocatedBytes += size;
   }
 
-  public HdslProtos.SCMContainerInfo getProtobuf() {
-    HdslProtos.SCMContainerInfo.Builder builder =
-        HdslProtos.SCMContainerInfo.newBuilder();
+  public HddsProtos.SCMContainerInfo getProtobuf() {
+    HddsProtos.SCMContainerInfo.Builder builder =
+        HddsProtos.SCMContainerInfo.newBuilder();
     builder.setPipeline(getPipeline().getProtobufMessage());
     builder.setAllocatedBytes(getAllocatedBytes());
     builder.setUsedBytes(getUsedBytes());
@@ -268,7 +268,7 @@ public class ContainerInfo
    * Builder class for ContainerInfo.
    */
   public static class Builder {
-    private HdslProtos.LifeCycleState state;
+    private HddsProtos.LifeCycleState state;
     private Pipeline pipeline;
     private long allocated;
     private long used;
@@ -284,7 +284,7 @@ public class ContainerInfo
       return this;
     }
 
-    public Builder setState(HdslProtos.LifeCycleState lifeCycleState) {
+    public Builder setState(HddsProtos.LifeCycleState lifeCycleState) {
       this.state = lifeCycleState;
       return this;
     }

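A minimal sketch using only members visible in the hunk above (the public no-arg constructor plus the state accessors); ALLOCATED is the lifecycle constant statically imported earlier in this patch:

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;

    public final class ContainerInfoSketch {
      public static void main(String[] args) {
        ContainerInfo info = new ContainerInfo();
        info.setState(HddsProtos.LifeCycleState.ALLOCATED);
        System.out.println("state: " + info.getState());
      }
    }
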
+ 2 - 3
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/DeleteBlockResult.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeleteBlockResult.java

@@ -15,10 +15,9 @@
  * the License.
  */
 
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
-
-import static org.apache.hadoop.hdsl.protocol.proto
+import static org.apache.hadoop.hdds.protocol.proto
     .ScmBlockLocationProtocolProtos.DeleteScmBlockResult;
 
 /**

+ 10 - 10
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/Pipeline.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/Pipeline.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonFilter;
@@ -29,8 +29,8 @@ import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
 import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -83,14 +83,14 @@ public class Pipeline {
    * @param pipeline - ProtoBuf definition for the pipeline.
    * @return Pipeline Object
    */
-  public static Pipeline getFromProtoBuf(HdslProtos.Pipeline pipeline) {
+  public static Pipeline getFromProtoBuf(HddsProtos.Pipeline pipeline) {
     Preconditions.checkNotNull(pipeline);
     PipelineChannel pipelineChannel =
         PipelineChannel.getFromProtoBuf(pipeline.getPipelineChannel());
     return new Pipeline(pipeline.getContainerName(), pipelineChannel);
   }
 
-  public HdslProtos.ReplicationFactor getFactor() {
+  public HddsProtos.ReplicationFactor getFactor() {
     return pipelineChannel.getFactor();
   }
 
@@ -143,9 +143,9 @@ public class Pipeline {
    * @return Protobuf message
    */
   @JsonIgnore
-  public HdslProtos.Pipeline getProtobufMessage() {
-    HdslProtos.Pipeline.Builder builder =
-        HdslProtos.Pipeline.newBuilder();
+  public HddsProtos.Pipeline getProtobufMessage() {
+    HddsProtos.Pipeline.Builder builder =
+        HddsProtos.Pipeline.newBuilder();
     builder.setContainerName(this.containerName);
     builder.setPipelineChannel(this.pipelineChannel.getProtobufMessage());
     return builder.build();
@@ -194,7 +194,7 @@ public class Pipeline {
    *
    * @return - LifeCycleStates.
    */
-  public HdslProtos.LifeCycleState getLifeCycleState() {
+  public HddsProtos.LifeCycleState getLifeCycleState() {
     return pipelineChannel.getLifeCycleState();
   }
 
@@ -212,7 +212,7 @@ public class Pipeline {
    *
    * @return type - Standalone, Ratis, Chained.
    */
-  public HdslProtos.ReplicationType getType() {
+  public HddsProtos.ReplicationType getType() {
     return pipelineChannel.getType();
   }
 

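A round-trip sketch built from the protobuf side, since the Java constructors' visibility is not shown; leaderID and name are assumed to be the only required PipelineChannel proto fields, with state, type and factor falling back to their proto defaults:

    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;

    public final class PipelineProtoSketch {
      public static void main(String[] args) {
        HddsProtos.Pipeline proto = HddsProtos.Pipeline.newBuilder()
            .setContainerName("demo-container")
            .setPipelineChannel(HddsProtos.PipelineChannel.newBuilder()
                .setLeaderID("leader-uuid")
                .setName("channel-0")
                .build())
            .build();
        Pipeline pipeline = Pipeline.getFromProtoBuf(proto);
        System.out.println("factor: " + pipeline.getFactor());
      }
    }
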
+ 11 - 11
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/PipelineChannel.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/PipelineChannel.java

@@ -15,15 +15,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.hdsl.protocol.DatanodeDetails;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.LifeCycleState;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 
 import java.util.Map;
 import java.util.TreeMap;
@@ -82,9 +82,9 @@ public class PipelineChannel {
   }
 
   @JsonIgnore
-  public HdslProtos.PipelineChannel getProtobufMessage() {
-    HdslProtos.PipelineChannel.Builder builder =
-        HdslProtos.PipelineChannel.newBuilder();
+  public HddsProtos.PipelineChannel getProtobufMessage() {
+    HddsProtos.PipelineChannel.Builder builder =
+        HddsProtos.PipelineChannel.newBuilder();
     for (DatanodeDetails datanode : datanodes.values()) {
       builder.addMembers(datanode.getProtoBufMessage());
     }
@@ -104,7 +104,7 @@ public class PipelineChannel {
   }
 
   public static PipelineChannel getFromProtoBuf(
-      HdslProtos.PipelineChannel transportProtos) {
+      HddsProtos.PipelineChannel transportProtos) {
     Preconditions.checkNotNull(transportProtos);
     PipelineChannel pipelineChannel =
         new PipelineChannel(transportProtos.getLeaderID(),
@@ -113,7 +113,7 @@ public class PipelineChannel {
             transportProtos.getFactor(),
             transportProtos.getName());
 
-    for (HdslProtos.DatanodeDetailsProto dataID :
+    for (HddsProtos.DatanodeDetailsProto dataID :
         transportProtos.getMembersList()) {
       pipelineChannel.addMember(DatanodeDetails.getFromProtoBuf(dataID));
     }

+ 2 - 2
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/StorageContainerException.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/StorageContainerException.java

@@ -15,9 +15,9 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/container/common/helpers/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/package-info.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm.container.common.helpers;
+package org.apache.hadoop.hdds.scm.container.common.helpers;
 /**
 Contains protocol buffer helper classes and utilities used in
  impl.

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm;
+package org.apache.hadoop.hdds.scm;
 
 /**
  * This package contains classes for the client of the storage container

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/LocatedContainer.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/LocatedContainer.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.protocol;
+package org.apache.hadoop.hdds.scm.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

+ 8 - 8
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmBlockLocationProtocol.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java

@@ -15,19 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm.protocol;
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
+import org.apache.hadoop.ozone.common.BlockGroup;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
 
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
-import org.apache.hadoop.ozone.common.BlockGroup;
-import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationType;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.ReplicationFactor;
-import org.apache.hadoop.scm.ScmInfo;
-
 /**
  * ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes
  * to read/write a block.

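A hedged sketch of the allocate call; the signature mirrors the client-side translator further below, the 64 MB size and owner string are illustrative, and `scm` stands for an already-constructed proxy:

    import java.io.IOException;
    import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
    import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
    import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;

    final class AllocateBlockSketch {
      static AllocatedBlock allocate(ScmBlockLocationProtocol scm)
          throws IOException {
        // Ask SCM for a 64 MB block, triple-replicated over Ratis.
        return scm.allocateBlock(64L * 1024 * 1024,
            HddsProtos.ReplicationType.RATIS,
            HddsProtos.ReplicationFactor.THREE, "demo-owner");
      }
    }
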
+ 4 - 4
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/ScmLocatedBlock.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmLocatedBlock.java

@@ -16,14 +16,14 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.protocol;
-
-import java.util.List;
-import java.util.stream.Collectors;
+package org.apache.hadoop.hdds.scm.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 
+import java.util.List;
+import java.util.stream.Collectors;
+
 /**
  * Holds the nodes that currently host the block for a block key.
  */

+ 14 - 13
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocol/StorageContainerLocationProtocol.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java

@@ -15,18 +15,19 @@
  * the License.
  */
 
-package org.apache.hadoop.scm.protocol;
+package org.apache.hadoop.hdds.scm.protocol;
+
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
 
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.List;
 
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
-import org.apache.hadoop.scm.ScmInfo;
-import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-
 /**
  * ContainerLocationProtocol is used by an HDFS node to find the set of nodes
  * that currently host a container.
@@ -37,8 +38,8 @@ public interface StorageContainerLocationProtocol {
    * set of datanodes that should be used creating this container.
    *
    */
-  Pipeline allocateContainer(HdslProtos.ReplicationType replicationType,
-      HdslProtos.ReplicationFactor factor, String containerName, String owner)
+  Pipeline allocateContainer(HddsProtos.ReplicationType replicationType,
+      HddsProtos.ReplicationFactor factor, String containerName, String owner)
       throws IOException;
 
   /**
@@ -85,8 +86,8 @@ public interface StorageContainerLocationProtocol {
    * @param nodeStatuses
    * @return List of Datanodes.
    */
-  HdslProtos.NodePool queryNode(EnumSet<HdslProtos.NodeState> nodeStatuses,
-      HdslProtos.QueryScope queryScope, String poolName) throws IOException;
+  HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState> nodeStatuses,
+      HddsProtos.QueryScope queryScope, String poolName) throws IOException;
 
   /**
    * Notify from client when begin or finish creating objects like pipeline
@@ -109,8 +110,8 @@ public interface StorageContainerLocationProtocol {
    * @param nodePool - optional machine list to build a pipeline.
    * @throws IOException
    */
-  Pipeline createReplicationPipeline(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor, HdslProtos.NodePool nodePool)
+  Pipeline createReplicationPipeline(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, HddsProtos.NodePool nodePool)
       throws IOException;
 
   /**

+ 1 - 1
hadoop-hdsl/tools/src/main/java/org/apache/hadoop/ozone/scm/cli/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/package-info.java

@@ -16,4 +16,4 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.ozone.scm.cli;
+package org.apache.hadoop.hdds.scm.protocol;

+ 27 - 19
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolClientSideTranslatorPB.java

@@ -14,31 +14,39 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
-package org.apache.hadoop.scm.protocolPB;
+package org.apache.hadoop.hdds.scm.protocolPB;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .AllocateScmBlockResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmKeyBlocksResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .GetScmBlockLocationsResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .KeyBlocks;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .ScmLocatedBlockProto;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
 import org.apache.hadoop.ozone.common.BlockGroup;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.AllocateScmBlockRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.AllocateScmBlockResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmKeyBlocksResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.GetScmBlockLocationsRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.GetScmBlockLocationsResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.ScmLocatedBlockProto;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.KeyBlocks;
-import org.apache.hadoop.scm.container.common.helpers.AllocatedBlock;
-import org.apache.hadoop.scm.ScmInfo;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
-import org.apache.hadoop.scm.protocol.ScmBlockLocationProtocol;
+import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -117,7 +125,7 @@ public final class ScmBlockLocationProtocolClientSideTranslatorPB
    */
   @Override
   public AllocatedBlock allocateBlock(long size,
-      HdslProtos.ReplicationType type, HdslProtos.ReplicationFactor factor,
+      HddsProtos.ReplicationType type, HddsProtos.ReplicationFactor factor,
       String owner) throws IOException {
     Preconditions.checkArgument(size > 0, "block size must be greater than 0");
 
@@ -181,9 +189,9 @@ public final class ScmBlockLocationProtocolClientSideTranslatorPB
    */
   @Override
   public ScmInfo getScmInfo() throws IOException {
-    HdslProtos.GetScmInfoRequestProto request =
-        HdslProtos.GetScmInfoRequestProto.getDefaultInstance();
-    HdslProtos.GetScmInfoRespsonseProto resp;
+    HddsProtos.GetScmInfoRequestProto request =
+        HddsProtos.GetScmInfoRequestProto.getDefaultInstance();
+    HddsProtos.GetScmInfoRespsonseProto resp;
     try {
       resp = rpcProxy.getScmInfo(NULL_RPC_CONTROLLER, request);
     } catch (ServiceException e) {

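A sketch of the getScmInfo() call shown above; the getClusterId()/getScmId() accessors are assumed to mirror ScmInfo's builder setters, and RPC proxy construction is omitted:

    import java.io.IOException;
    import org.apache.hadoop.hdds.scm.ScmInfo;
    import org.apache.hadoop.hdds.scm.protocolPB
        .ScmBlockLocationProtocolClientSideTranslatorPB;

    final class ScmInfoSketch {
      static void print(ScmBlockLocationProtocolClientSideTranslatorPB scm)
          throws IOException {
        ScmInfo info = scm.getScmInfo();
        System.out.println(info.getClusterId() + " / " + info.getScmId());
      }
    }
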
+ 3 - 3
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/ScmBlockLocationProtocolPB.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java

@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm.protocolPB;
+package org.apache.hadoop.hdds.scm.protocolPB;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.ipc.ProtocolInfo;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
     .ScmBlockLocationProtocolService;
+import org.apache.hadoop.ipc.ProtocolInfo;
 
 /**
  * Protocol used from an HDFS node to StorageContainerManager.  This extends the

+ 40 - 28
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java

@@ -14,33 +14,45 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
-package org.apache.hadoop.scm.protocolPB;
+package org.apache.hadoop.hdds.scm.protocolPB;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.scm.ScmInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo;
+import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
+import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.GetContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.NodeQueryRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.NodeQueryResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.PipelineResponseProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerRequestProto;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos.SCMListContainerResponseProto;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos;
-import org.apache.hadoop.scm.ScmInfo;
-import org.apache.hadoop.scm.container.common.helpers.ContainerInfo;
-import org.apache.hadoop.scm.protocol.StorageContainerLocationProtocol;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ContainerResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMDeleteContainerRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.SCMListContainerResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.NodeQueryRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.NodeQueryResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.ObjectStageChangeRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.PipelineResponseProto;
-import org.apache.hadoop.scm.container.common.helpers.Pipeline;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -85,8 +97,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
    * @throws IOException
    */
   @Override
-  public Pipeline allocateContainer(HdslProtos.ReplicationType type,
-      HdslProtos.ReplicationFactor factor, String
+  public Pipeline allocateContainer(HddsProtos.ReplicationType type,
+      HddsProtos.ReplicationFactor factor, String
       containerName, String owner) throws IOException {
 
     Preconditions.checkNotNull(containerName, "Container Name cannot be Null");
@@ -151,7 +163,7 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
       SCMListContainerResponseProto response =
           rpcProxy.listContainer(NULL_RPC_CONTROLLER, request);
       List<ContainerInfo> containerList = new ArrayList<>();
-      for (HdslProtos.SCMContainerInfo containerInfoProto : response
+      for (HddsProtos.SCMContainerInfo containerInfoProto : response
           .getContainersList()) {
         containerList.add(ContainerInfo.fromProtobuf(containerInfoProto));
       }
@@ -191,8 +203,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
    * @return List of Datanodes.
    */
   @Override
-  public HdslProtos.NodePool queryNode(EnumSet<HdslProtos.NodeState>
-      nodeStatuses, HdslProtos.QueryScope queryScope, String poolName)
+  public HddsProtos.NodePool queryNode(EnumSet<HddsProtos.NodeState>
+      nodeStatuses, HddsProtos.QueryScope queryScope, String poolName)
       throws IOException {
     // TODO : We support only cluster wide query right now. So ignoring checking
     // queryScope and poolName
@@ -248,8 +260,8 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
    * @throws IOException
    */
   @Override
-  public Pipeline createReplicationPipeline(HdslProtos.ReplicationType
-      replicationType, HdslProtos.ReplicationFactor factor, HdslProtos
+  public Pipeline createReplicationPipeline(HddsProtos.ReplicationType
+      replicationType, HddsProtos.ReplicationFactor factor, HddsProtos
       .NodePool nodePool) throws IOException {
     PipelineRequestProto request = PipelineRequestProto.newBuilder()
         .setNodePool(nodePool)
@@ -277,10 +289,10 @@ public final class StorageContainerLocationProtocolClientSideTranslatorPB
 
   @Override
   public ScmInfo getScmInfo() throws IOException {
-    HdslProtos.GetScmInfoRequestProto request =
-        HdslProtos.GetScmInfoRequestProto.getDefaultInstance();
+    HddsProtos.GetScmInfoRequestProto request =
+        HddsProtos.GetScmInfoRequestProto.getDefaultInstance();
     try {
-      HdslProtos.GetScmInfoRespsonseProto resp = rpcProxy.getScmInfo(
+      HddsProtos.GetScmInfoRespsonseProto resp = rpcProxy.getScmInfo(
           NULL_RPC_CONTROLLER, request);
       ScmInfo.Builder builder = new ScmInfo.Builder()
           .setClusterId(resp.getClusterId())
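
Taken together, the hunks above swap every HdslProtos reference in the client-side translator for HddsProtos and move the class itself under org.apache.hadoop.hdds.scm.protocolPB. A minimal sketch of a caller after the rename, assuming hadoop-hdds-common is on the classpath and a translator instance is constructed elsewhere; the imports, method names, and signatures come from the diff, while the wrapper class and the chosen replication settings are illustrative only:

package org.apache.hadoop.example;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline;
import org.apache.hadoop.hdds.scm.protocolPB
    .StorageContainerLocationProtocolClientSideTranslatorPB;

import java.io.IOException;

/** Hypothetical caller updated for the HDSL-to-HDDS rename. */
public final class RenamedClientSketch {
  private RenamedClientSketch() { }

  // allocateContainer now takes HddsProtos enums instead of HdslProtos ones.
  static Pipeline allocate(
      StorageContainerLocationProtocolClientSideTranslatorPB client,
      String containerName, String owner) throws IOException {
    return client.allocateContainer(HddsProtos.ReplicationType.STAND_ALONE,
        HddsProtos.ReplicationFactor.ONE, containerName, owner);
  }

  // getScmInfo is unchanged apart from the package move of ScmInfo.
  static String clusterId(
      StorageContainerLocationProtocolClientSideTranslatorPB client)
      throws IOException {
    ScmInfo info = client.getScmInfo();
    return info.getClusterId();
  }
}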

+ 4 - 2
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/StorageContainerLocationProtocolPB.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java

@@ -15,11 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.scm.protocolPB;
+package org.apache.hadoop.hdds.scm.protocolPB;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto
+    .StorageContainerLocationProtocolProtos
+    .StorageContainerLocationProtocolService;
 import org.apache.hadoop.ipc.ProtocolInfo;
-import org.apache.hadoop.hdsl.protocol.proto.StorageContainerLocationProtocolProtos.StorageContainerLocationProtocolService;
 
 /**
  * Protocol used from an HDFS node to StorageContainerManager.  This extends the
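
The interface above follows Hadoop's standard protocolPB pattern: a marker interface that binds the generated protobuf service stub to an RPC protocol name via @ProtocolInfo. A sketch of that shape under the renamed packages; the interface name and annotation values here are illustrative, not copied from the patch:

package org.apache.hadoop.example;

import org.apache.hadoop.hdds.protocol.proto
    .StorageContainerLocationProtocolProtos
    .StorageContainerLocationProtocolService;
import org.apache.hadoop.ipc.ProtocolInfo;

// Hypothetical marker interface; the real one lives in
// org.apache.hadoop.hdds.scm.protocolPB and declares its own protocol name.
@ProtocolInfo(
    protocolName = "org.apache.hadoop.example.ExampleProtocol",
    protocolVersion = 1)
public interface ExampleProtocolPB
    extends StorageContainerLocationProtocolService.BlockingInterface {
}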

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/protocolPB/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.protocolPB;
+package org.apache.hadoop.hdds.scm.protocolPB;
 
 /**
  * This package contains classes for the client of the storage container

+ 24 - 25
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/ContainerProtocolCalls.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/ContainerProtocolCalls.java

@@ -16,44 +16,43 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.scm.storage;
+package org.apache.hadoop.hdds.scm.storage;
 
 import com.google.protobuf.ByteString;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.ChunkInfo;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.scm.XceiverClientSpi;
+import org.apache.hadoop.hdds.scm.container.common.helpers
+    .StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.ChunkInfo;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ContainerCommandRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ContainerCommandResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
-    .GetKeyRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.GetKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .GetKeyResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .GetSmallFileRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .GetSmallFileResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.KeyData;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
-    .PutKeyRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.KeyData;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.PutKeyRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .PutSmallFileRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ReadChunkRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ReadChunkResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos.Type;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
-    .WriteChunkRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
-    .ReadContainerResponseProto;
-import org.apache.hadoop.hdsl.protocol.proto.ContainerProtos
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
     .ReadContainerRequestProto;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.KeyValue;
-import org.apache.hadoop.scm.container.common.helpers.StorageContainerException;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .ReadContainerResponseProto;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;
+import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
+    .WriteChunkRequestProto;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.KeyValue;
 
 import java.io.IOException;
-import org.apache.hadoop.scm.XceiverClientSpi;
 
 /**
  * Implementation of all container protocol calls performed by Container
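
Beyond the package swap, this hunk normalizes the import style: nested classes under org.apache.hadoop.hdds.protocol.proto are wrapped at a dot boundary whenever the single-line form would overflow the 80-column checkstyle limit. A hypothetical consumer written in the same style, assuming only the generated ContainerProtos messages named above:

package org.apache.hadoop.example;

import org.apache.hadoop.hdds.protocol.proto.ContainerProtos
    .ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.proto.ContainerProtos.Type;

/** Hypothetical consumer of the generated container protocol messages. */
public final class WrappedImportSketch {
  private WrappedImportSketch() { }

  // cmdType is a required field of ContainerCommandRequestProto, so the
  // generated message exposes getCmdType().
  static boolean isReadChunk(ContainerCommandRequestProto request) {
    return request.getCmdType() == Type.ReadChunk;
  }
}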

+ 1 - 1
hadoop-hdsl/common/src/main/java/org/apache/hadoop/scm/storage/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/storage/package-info.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.scm.storage;
+package org.apache.hadoop.hdds.scm.storage;
 
 /**
  * This package contains StorageContainerManager classes.

+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneAcl.java


+ 3 - 4
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java

@@ -20,10 +20,9 @@ package org.apache.hadoop.ozone;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.ozone.client.ReplicationFactor;
-import org.apache.hadoop.ozone.client.ReplicationType;
-
-import org.apache.hadoop.scm.ScmConfigKeys;
+import org.apache.hadoop.hdds.client.ReplicationFactor;
+import org.apache.hadoop.hdds.client.ReplicationType;
+import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 
 /**
  * This class contains constants for configuration keys used in Ozone.
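
The client-facing replication enums move as well, from org.apache.hadoop.ozone.client to org.apache.hadoop.hdds.client, so downstream code changes only its imports. A hedged sketch of such a caller; the enum constants RATIS and THREE are assumed to carry over unchanged from the pre-rename classes:

package org.apache.hadoop.example;

import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;

/** Hypothetical helper choosing replication settings after the move. */
public final class ReplicationDefaults {
  private ReplicationDefaults() { }

  // Only the package changed in this patch; the enum names are untouched,
  // so callers update imports and nothing else.
  static ReplicationType defaultType() {
    return ReplicationType.RATIS;
  }

  static ReplicationFactor defaultFactor() {
    return ReplicationFactor.THREE;
  }
}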

+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java


+ 2 - 2
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/BlockGroup.java

@@ -17,8 +17,8 @@
 
 package org.apache.hadoop.ozone.common;
 
-import org.apache.hadoop.hdsl.protocol.proto
-    .ScmBlockLocationProtocolProtos.KeyBlocks;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .KeyBlocks;
 
 import java.util.List;
 

+ 5 - 3
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/DeleteBlockGroupResult.java

@@ -17,9 +17,11 @@
  */
 package org.apache.hadoop.ozone.common;
 
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmBlockResult;
-import org.apache.hadoop.hdsl.protocol.proto.ScmBlockLocationProtocolProtos.DeleteScmBlockResult.Result;
-import org.apache.hadoop.scm.container.common.helpers.DeleteBlockResult;
+import org.apache.hadoop.hdds.scm.container.common.helpers.DeleteBlockResult;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmBlockResult;
+import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos
+    .DeleteScmBlockResult.Result;
 
 import java.util.ArrayList;
 import java.util.List;

+ 3 - 3
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/InconsistentStorageStateException.java

@@ -17,12 +17,12 @@
   */
 package org.apache.hadoop.ozone.common;
 
-import java.io.File;
-import java.io.IOException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import java.io.File;
+import java.io.IOException;
+
 /**
  * The exception is thrown when file system state is inconsistent
  * and is not recoverable.

+ 7 - 8
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java

@@ -17,6 +17,13 @@
  */
 package org.apache.hadoop.ozone.common;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+import org.apache.hadoop.util.Time;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.DirectoryStream;
@@ -24,14 +31,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Properties;
 
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType;
-import org.apache.hadoop.util.Time;
-
 /**
  * Storage information file. This Class defines the methods to check
  * the consistency of the storage dir and the version file.

+ 6 - 7
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/StorageInfo.java

@@ -17,19 +17,18 @@
  */
 package org.apache.hadoop.ozone.common;
 
-import java.io.IOException;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
+
+import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
-import java.io.File;
+import java.io.IOException;
 import java.io.RandomAccessFile;
-
 import java.util.Properties;
 import java.util.UUID;
 
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdsl.protocol.proto.HdslProtos.NodeType;
-
 /**
  * Common class for storage information. This class defines the common
  * properties and functions to set them , write them into the version file

+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/package-info.java


+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/InvalidStateTransitionException.java


+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/StateMachine.java


+ 0 - 0
hadoop-hdsl/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/statemachine/package-info.java


Not all files are shown because too many files changed in this diff