
HDDS-2143. Rename classes under package org.apache.hadoop.utils

Closes #1465
Bharat Viswanadham committed 5 years ago
commit 6d4b20c047
100 changed files with 167 additions and 174 deletions
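For code outside this patch, the practical impact is confined to import statements: every class keeps its name and moves from org.apache.hadoop.utils to org.apache.hadoop.hdds.utils (and likewise for the db and db.cache subpackages). The sketch below is a minimal, hypothetical downstream caller, not part of this commit; it assumes only the Table interface and its get(key) method, which this patch moves unchanged into the new package.

// Hypothetical class, shown only to illustrate the package move in HDDS-2143.
// Before this commit the import read: import org.apache.hadoop.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.Table;

import java.io.IOException;

public final class ExampleTableReader {
  private final Table<byte[], byte[]> table;

  public ExampleTableReader(Table<byte[], byte[]> table) {
    this.table = table;
  }

  // Behaviour is unchanged by the rename; only the package prefix above moved.
  public byte[] read(byte[] key) throws IOException {
    return table.get(key);
  }
}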
  1. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java
  2. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java
  3. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundService.java
  4. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTask.java
  5. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTaskQueue.java
  6. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTaskResult.java
  7. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BatchOperation.java
  8. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/EntryConsumer.java
  9. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/HddsVersionInfo.java
  10. 2 2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStore.java
  11. 4 7
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java
  12. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetaStoreIterator.java
  13. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataKeyFilters.java
  14. 2 2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataStore.java
  15. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataStoreBuilder.java
  16. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RetriableTask.java
  17. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStore.java
  18. 4 6
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java
  19. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreMBean.java
  20. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/Scheduler.java
  21. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/UniqueId.java
  22. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/VersionInfo.java
  23. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BatchOperation.java
  24. 2 2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayKeyValue.java
  25. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java
  26. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java
  27. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBCheckpoint.java
  28. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBConfigFromFile.java
  29. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBProfile.java
  30. 2 2
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java
  31. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java
  32. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBUpdatesWrapper.java
  33. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java
  34. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java
  35. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBBatchOperation.java
  36. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBCheckpointManager.java
  37. 3 3
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java
  38. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java
  39. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java
  40. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RocksDBCheckpoint.java
  41. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/SequenceNumberNotFoundException.java
  42. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java
  43. 3 3
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
  44. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TableConfig.java
  45. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java
  46. 10 10
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
  47. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheKey.java
  48. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheResult.java
  49. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheValue.java
  50. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/EpochEntry.java
  51. 5 6
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java
  52. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCacheImpl.java
  53. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/package-info.java
  54. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/package-info.java
  55. 1 1
      hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/package-info.java
  56. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestHddsIdFactory.java
  57. 5 6
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
  58. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestRetriableTask.java
  59. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java
  60. 2 2
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestDBConfigFromFile.java
  61. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestDBStoreBuilder.java
  62. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java
  63. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java
  64. 4 4
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java
  65. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCacheImpl.java
  66. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/cache/package-info.java
  67. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/package-info.java
  68. 1 1
      hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/package-info.java
  69. 1 1
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java
  70. 2 2
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java
  71. 1 1
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ReferenceCountedDB.java
  72. 4 4
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueBlockIterator.java
  73. 3 3
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java
  74. 2 2
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java
  75. 6 6
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingService.java
  76. 1 1
      hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java
  77. 2 2
      hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestContainerCache.java
  78. 1 1
      hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java
  79. 1 1
      hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java
  80. 1 1
      hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfoImpl.java
  81. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java
  82. 3 3
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java
  83. 4 4
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java
  84. 3 3
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java
  85. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationActivityStatus.java
  86. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java
  87. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/DeletedBlocksTransactionCodec.java
  88. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/LongCodec.java
  89. 3 3
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java
  90. 5 5
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreRDBImpl.java
  91. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java
  92. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/BackgroundPipelineCreator.java
  93. 4 4
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java
  94. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
  95. 1 1
      hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java
  96. 3 3
      hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
  97. 2 2
      hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMMetadataManager.java
  98. 1 1
      hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmBucketInfoCodec.java
  99. 1 1
      hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmKeyInfoCodec.java
  100. 1 1
      hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmMultipartKeyInfoCodec.java

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsConfigKeys.java

@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hdds;
 
-import org.apache.hadoop.utils.db.DBProfile;
+import org.apache.hadoop.hdds.utils.db.DBProfile;
 
 /**
  * This class contains constants for configuration keys and default values

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/cli/HddsVersionProvider.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.cli;
 
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 
 import picocli.CommandLine.IVersionProvider;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundService.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundService.java

@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTask.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTask.java

@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.concurrent.Callable;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskQueue.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTaskQueue.java

@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.PriorityQueue;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BackgroundTaskResult.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BackgroundTaskResult.java

@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 /**
  * Result of a {@link BackgroundTask}.

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/BatchOperation.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/BatchOperation.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.collect.Lists;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/EntryConsumer.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/EntryConsumer.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/HddsVersionInfo.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/HddsVersionInfo.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

+ 2 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStore.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStore.java

@@ -16,10 +16,10 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.DBIterator;

+ 4 - 7
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/LevelDBStoreIterator.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java

@@ -16,19 +16,16 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.iq80.leveldb.DBIterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
-
-
 /**
  * LevelDB store iterator.
  */
-public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class LevelDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
 
 
   private DBIterator levelDBIterator;
@@ -44,10 +41,10 @@ public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
 
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if(levelDBIterator.hasNext()) {
       Map.Entry<byte[], byte[]> entry = levelDBIterator.next();
-      return KeyValue.create(entry.getKey(), entry.getValue());
+      return MetadataStore.KeyValue.create(entry.getKey(), entry.getValue());
     }
     throw new NoSuchElementException("LevelDB Store has no more elements");
   }

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetaStoreIterator.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetaStoreIterator.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.Iterator;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataKeyFilters.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataKeyFilters.java

@@ -15,7 +15,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;

+ 2 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStore.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataStore.java

@@ -16,11 +16,11 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 
 import java.io.Closeable;
 import java.io.IOException;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/MetadataStoreBuilder.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/MetadataStoreBuilder.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.io.File;
 import java.io.IOException;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RetriableTask.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RetriableTask.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.util.ThreadUtil;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStore.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStore.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;

+ 4 - 6
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreIterator.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java

@@ -17,18 +17,16 @@
  */
 
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.rocksdb.RocksIterator;
 
 import java.util.NoSuchElementException;
 
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
-
 /**
  * RocksDB store iterator.
  */
-public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
 
   private RocksIterator rocksDBIterator;
 
@@ -43,9 +41,9 @@ public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
 
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if (rocksDBIterator.isValid()) {
-      KeyValue value = KeyValue.create(rocksDBIterator.key(), rocksDBIterator
+      MetadataStore.KeyValue value = MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
           .value());
       rocksDBIterator.next();
       return value;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/RocksDBStoreMBean.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreMBean.java

@@ -16,7 +16,7 @@
  *  limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/Scheduler.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/Scheduler.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.ratis.util.function.CheckedRunnable;
 import org.slf4j.Logger;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/UniqueId.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/UniqueId.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.hdds.HddsUtils;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/VersionInfo.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/VersionInfo.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/BatchOperation.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/BatchOperation.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 /**
  * Class represents a batch operation, collects multiple db operation.

+ 2 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/ByteArrayKeyValue.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/ByteArrayKeyValue.java

@@ -16,9 +16,9 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 
 /**
  * Key value for raw Table implementations.

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Codec.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Codec.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/CodecRegistry.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/CodecRegistry.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.HashMap;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBCheckpoint.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBCheckpoint.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.file.Path;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBConfigFromFile.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBConfigFromFile.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.base.Preconditions;
 import org.eclipse.jetty.util.StringUtil;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBProfile.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBProfile.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.hadoop.conf.StorageUnit;
 import org.rocksdb.BlockBasedTableConfig;

+ 2 - 2
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStore.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBStore.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.File;
 import java.io.IOException;
@@ -25,7 +25,7 @@ import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 
 /**
  * The DBStore interface provides the ability to create Tables, which store

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStoreBuilder.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBStoreBuilder.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBUpdatesWrapper.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/DBUpdatesWrapper.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.util.ArrayList;
 import java.util.List;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/IntegerCodec.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/IntegerCodec.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/LongCodec.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/LongCodec.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.primitives.Longs;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBBatchOperation.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBBatchOperation.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBCheckpointManager.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBCheckpointManager.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.nio.file.Path;
 import java.nio.file.Paths;

+ 3 - 3
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBStore.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStore.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_CHECKPOINTS_DIR_NAME;
 
@@ -33,12 +33,12 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.utils.RocksDBStoreMBean;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.metrics2.util.MBeans;
-import org.apache.hadoop.utils.RocksDBStoreMBean;
 
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 import org.apache.ratis.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBStoreIterator.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.NoSuchElementException;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBTable.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RocksDBCheckpoint.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/RocksDBCheckpoint.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.file.Path;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/SequenceNumberNotFoundException.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/SequenceNumberNotFoundException.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/StringCodec.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/StringCodec.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import org.apache.hadoop.hdfs.DFSUtil;

+ 3 - 3
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Table.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -25,8 +25,8 @@ import java.util.Map;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 /**
  * Interface for key-value store that stores ozone metadata. Ozone metadata is
  * stored as key value pairs, both key and value are arbitrary byte arrays. Each

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/TableConfig.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TableConfig.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/TableIterator.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.Closeable;
 import java.io.IOException;

+ 10 - 10
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/TypedTable.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -24,15 +24,15 @@ import java.util.Map;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheResult;
-import org.apache.hadoop.utils.db.cache.CacheValue;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
-import org.apache.hadoop.utils.db.cache.TableCache;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
-
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.EXISTS;
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheResult;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCache;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
+
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.EXISTS;
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
 /**
  * Strongly typed table implementation.
  * <p>

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/CacheKey.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheKey.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/CacheResult.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheResult.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/CacheValue.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/CacheValue.java

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import com.google.common.base.Optional;
 

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/EpochEntry.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/EpochEntry.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
 

+ 5 - 6
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/TableCache.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java

@@ -17,12 +17,11 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
-import org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
+
 import java.util.Iterator;
 import java.util.Map;
 
@@ -78,15 +77,15 @@ public interface TableCache<CACHEKEY extends CacheKey,
    * Check key exist in cache or not.
    *
    * If it exists return CacheResult with value and status as
-   * {@link CacheStatus#EXISTS}
+   * {@link CacheResult.CacheStatus#EXISTS}
    *
    * If it does not exist:
    *  If cache clean up policy is
    *  {@link TableCacheImpl.CacheCleanupPolicy#NEVER} it means table cache is
    *  full cache. It return's {@link CacheResult} with null
-   *  and status as {@link CacheStatus#NOT_EXIST}.
+   *  and status as {@link CacheResult.CacheStatus#NOT_EXIST}.
    *
-   *  If cache clean up policy is {@link CacheCleanupPolicy#MANUAL} it means
+   *  If cache clean up policy is {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
    *  table cache is partial cache. It return's {@link CacheResult} with
    *  null and status as MAY_EXIST.
    *

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/TableCacheImpl.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCacheImpl.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Iterator;
 import java.util.Map;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/cache/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/package-info.java

@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/package-info.java

@@ -19,4 +19,4 @@
 /**
  * Database interfaces for Ozone.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;

+ 1 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/package-info.java → hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/package-info.java

@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestHddsIdFactory.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestHddsIdFactory.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.ArrayList;
 import java.util.List;

+ 5 - 6
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestMetadataStore.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java

@@ -14,7 +14,7 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
@@ -25,9 +25,8 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -122,7 +121,7 @@ public class TestMetadataStore {
 
     //As database is empty, check whether iterator is working as expected or
     // not.
-    MetaStoreIterator<KeyValue> metaStoreIterator = dbStore.iterator();
+    MetaStoreIterator< MetadataStore.KeyValue > metaStoreIterator = dbStore.iterator();
     assertFalse(metaStoreIterator.hasNext());
     try {
       metaStoreIterator.next();
@@ -140,7 +139,7 @@ public class TestMetadataStore {
 
     int i = 0;
     while (metaStoreIterator.hasNext()) {
-      KeyValue val = metaStoreIterator.next();
+      MetadataStore.KeyValue val = metaStoreIterator.next();
       assertEquals("a" + i, getString(val.getKey()));
       assertEquals("a-value" + i, getString(val.getValue()));
       i++;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRetriableTask.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestRetriableTask.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/TestRocksDBStoreMBean.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;

+ 2 - 2
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBConfigFromFile.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestDBConfigFromFile.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -40,7 +40,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import static org.apache.hadoop.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
+import static org.apache.hadoop.hdds.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
 
 /**
  * DBConf tests.

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestDBStoreBuilder.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestDBStoreBuilder.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestRDBStore.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStore.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import javax.management.MBeanServer;
 

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestRDBTableStore.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBTableStore.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;

+ 4 - 4
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/TestTypedRDBTableStore.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/TestTypedRDBTableStore.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -29,11 +29,11 @@ import java.util.Set;
 import com.google.common.base.Optional;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/cache/TestTableCacheImpl.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/cache/TestTableCacheImpl.java

@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Arrays;
 import java.util.Collection;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/cache/package-info.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/cache/package-info.java

@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Cache Utilities.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/db/package-info.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/db/package-info.java

@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Utilities.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;

+ 1 - 1
hadoop-hdds/common/src/test/java/org/apache/hadoop/utils/package-info.java → hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/package-info.java

@@ -19,4 +19,4 @@
 /**
  * DB test Utils.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;

+ 1 - 1
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/DeleteBlocksCommandHandler.java

@@ -47,7 +47,7 @@ import org.apache.hadoop.ozone.protocol.commands.DeleteBlockCommandStatus;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.BatchOperation;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 2 - 2
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ContainerCache.java

@@ -23,8 +23,8 @@ import org.apache.commons.collections.MapIterator;
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/utils/ReferenceCountedDB.java

@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.container.common.utils;
 import com.google.common.base.Preconditions;
 
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 4 - 4
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueBlockIterator.java

@@ -28,11 +28,11 @@ import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.common.interfaces.BlockIterator;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerLocationUtil;
-import org.apache.hadoop.utils.MetaStoreIterator;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetaStoreIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataStore.KeyValue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 3 - 3
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/helpers/KeyValueContainerUtil.java

@@ -36,9 +36,9 @@ import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.helpers.BlockData;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;

+ 2 - 2
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/BlockManagerImpl.java

@@ -33,8 +33,8 @@ import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.common.interfaces.Container;
 import org.apache.hadoop.ozone.container.keyvalue.interfaces.BlockManager;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 6 - 6
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/statemachine/background/BlockDeletingService.java

@@ -40,12 +40,12 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 1 - 1
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ozoneimpl/ContainerReader.java

@@ -40,7 +40,7 @@ import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
 import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerUtil;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 2 - 2
hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestContainerCache.java

@@ -23,8 +23,8 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;

+ 1 - 1
hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java

@@ -33,7 +33,7 @@ import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingP
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.junit.After;
 import org.junit.Before;

+ 1 - 1
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/PrometheusMetricsSink.java

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.server;
 
-import static org.apache.hadoop.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
+import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
 
 import java.io.IOException;
 import java.io.Writer;

+ 1 - 1
hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/ServiceRuntimeInfoImpl.java

@@ -17,7 +17,7 @@
 
 package org.apache.hadoop.hdds.server;
 
-import org.apache.hadoop.utils.VersionInfo;
+import org.apache.hadoop.hdds.utils.VersionInfo;
 
 /**
  * Helper base class to report the standard version and runtime information.

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/BlockManagerImpl.java

@@ -45,7 +45,7 @@ import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
 import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.utils.UniqueId;
+import org.apache.hadoop.hdds.utils.UniqueId;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 3 - 3
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogImpl.java

@@ -47,9 +47,9 @@ import org.apache.hadoop.hdds.scm.container.ContainerReplica;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.server.events.EventHandler;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
-import org.apache.hadoop.utils.db.BatchOperation;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.eclipse.jetty.util.ConcurrentHashSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+ 4 - 4
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/SCMBlockDeletingService.java

@@ -30,10 +30,10 @@ import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.protocol.commands.CommandForDatanode;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult.EmptyTaskResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 3 - 3
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/SCMContainerManager.java

@@ -33,9 +33,9 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ReplicationActivityStatus.java

@@ -28,7 +28,7 @@ import org.apache.hadoop.metrics2.util.MBeans;
 
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/BigIntegerCodec.java

@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 
 import java.io.IOException;
 import java.math.BigInteger;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Encode and decode BigInteger.

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/DeletedBlocksTransactionCodec.java

@@ -24,7 +24,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Codec for Persisting the DeletedBlocks.

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/LongCodec.java

@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 
 import com.google.common.primitives.Longs;
 import java.io.IOException;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Codec for Persisting the DeletedBlocks.
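
Note: the codec classes above only change their import; the Codec interface itself now lives in org.apache.hadoop.hdds.utils.db. A minimal sketch of a codec written against the relocated interface, assuming it exposes toPersistedFormat/fromPersistedFormat as the classes touched here suggest (ExampleLongCodec is a hypothetical name):

// Hypothetical illustration only: a Long codec against the relocated interface.
package org.apache.hadoop.hdds.scm.metadata;

import com.google.common.primitives.Longs;
import java.io.IOException;
import org.apache.hadoop.hdds.utils.db.Codec;  // was org.apache.hadoop.utils.db.Codec

public class ExampleLongCodec implements Codec<Long> {

  @Override
  public byte[] toPersistedFormat(Long object) throws IOException {
    // Persist the long as its 8-byte big-endian representation.
    return object == null ? null : Longs.toByteArray(object);
  }

  @Override
  public Long fromPersistedFormat(byte[] rawData) throws IOException {
    // Rebuild the long from the stored bytes.
    return rawData == null ? null : Longs.fromByteArray(rawData);
  }
}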

+ 3 - 3
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStore.java

@@ -23,11 +23,11 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import java.io.IOException;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 
 /**
  * Generic interface for data stores for SCM.

+ 5 - 5
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/SCMMetadataStoreRDBImpl.java

@@ -27,12 +27,12 @@ import java.io.IOException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority
     .CertificateStore;
 import org.apache.hadoop.hdds.server.ServerUtils;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.DBStoreBuilder;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -126,7 +126,7 @@ public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {
   }
 
   @Override
-  public org.apache.hadoop.utils.db.DBStore getStore() {
+  public DBStore getStore() {
     return this.store;
   }
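
Note: the builder chain used by SCMMetadataStoreRDBImpl is unchanged apart from the package. A minimal sketch of building a DBStore and writing through a batch with the relocated classes, assuming newBuilder/setName/setPath/addTable/build plus initBatchOperation/commitBatchOperation behave as this class uses them (table, path and class names below are hypothetical):

// Hypothetical illustration only: a DBStore built from the relocated classes.
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.utils.db.BatchOperation;
import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
import org.apache.hadoop.hdds.utils.db.Table;

final class ScmDbExample {
  private ScmDbExample() {
  }

  static void writeBatched(String metaDir) throws Exception {
    DBStore store = DBStoreBuilder.newBuilder(new OzoneConfiguration())
        .setName("example-scm.db")
        .setPath(Paths.get(metaDir))
        .addTable("exampleTable")
        .build();
    try {
      Table<byte[], byte[]> table = store.getTable("exampleTable");
      // Stage the write in a batch and commit it in one shot.
      BatchOperation batch = store.initBatchOperation();
      table.putWithBatch(batch,
          "key".getBytes(StandardCharsets.UTF_8),
          "value".getBytes(StandardCharsets.UTF_8));
      store.commitBatchOperation(batch);
    } finally {
      store.close();
    }
  }
}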
 

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/X509CertificateCodec.java

@@ -25,7 +25,7 @@ import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Encodes and Decodes X509Certificate Class.

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/BackgroundPipelineCreator.java

@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 4 - 4
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java

@@ -33,10 +33,10 @@ import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java

@@ -27,7 +27,7 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 1 - 1
hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java

@@ -99,7 +99,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.util.JvmPauseMonitor;
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

+ 3 - 3
hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java

@@ -43,9 +43,9 @@ import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
     .DeleteBlockTransactionResult;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

+ 2 - 2
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/OMMetadataManager.java

@@ -30,8 +30,8 @@ import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
 import org.apache.hadoop.ozone.om.lock.OzoneManagerLock;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeList;
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 
 import com.google.common.annotations.VisibleForTesting;
 

+ 1 - 1
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmBucketInfoCodec.java

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;

+ 1 - 1
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmKeyInfoCodec.java

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;

+ 1 - 1
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/codec/OmMultipartKeyInfoCodec.java

@@ -22,7 +22,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 
 /**

Some files were not shown because too many files changed in this diff