Преглед изворни кода

HDDS-2154. Fix Checkstyle issues (#1475)

Elek, Márton пре 5 година
родитељ
комит
126ef77a81

+ 6 - 3
hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/client/HddsClientUtils.java

@@ -133,7 +133,8 @@ public final class HddsClientUtils {
    *
    * @throws IllegalArgumentException
    */
-  public static void verifyResourceName(String resName) throws IllegalArgumentException {
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
     if (resName == null) {
       throw new IllegalArgumentException("Bucket or Volume name is null");
     }
@@ -141,7 +142,8 @@ public final class HddsClientUtils {
     if (resName.length() < OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH ||
         resName.length() > OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH) {
       throw new IllegalArgumentException(
-          "Bucket or Volume length is illegal, valid length is 3-63 characters");
+          "Bucket or Volume length is illegal, "
+              + "valid length is 3-63 characters");
     }
 
     if (resName.charAt(0) == '.' || resName.charAt(0) == '-') {
@@ -151,7 +153,8 @@ public final class HddsClientUtils {
 
     if (resName.charAt(resName.length() - 1) == '.' ||
         resName.charAt(resName.length() - 1) == '-') {
-      throw new IllegalArgumentException("Bucket or Volume name cannot end with a period or dash");
+      throw new IllegalArgumentException("Bucket or Volume name "
+          + "cannot end with a period or dash");
     }
 
     boolean isIPv4 = true;

+ 2 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/LevelDBStoreIterator.java

@@ -25,7 +25,8 @@ import java.util.NoSuchElementException;
 /**
  * LevelDB store iterator.
  */
-public class LevelDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
+public class LevelDBStoreIterator
+    implements MetaStoreIterator<MetadataStore.KeyValue> {
 
 
   private DBIterator levelDBIterator;

+ 5 - 3
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/RocksDBStoreIterator.java

@@ -26,7 +26,8 @@ import java.util.NoSuchElementException;
 /**
  * RocksDB store iterator.
  */
-public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
+public class RocksDBStoreIterator
+    implements MetaStoreIterator<MetadataStore.KeyValue> {
 
   private RocksIterator rocksDBIterator;
 
@@ -43,8 +44,9 @@ public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.Ke
   @Override
   public MetadataStore.KeyValue next() {
     if (rocksDBIterator.isValid()) {
-      MetadataStore.KeyValue value = MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
-          .value());
+      MetadataStore.KeyValue value =
+          MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
+              .value());
       rocksDBIterator.next();
       return value;
     }

+ 2 - 1
hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/cache/TableCache.java

@@ -85,7 +85,8 @@ public interface TableCache<CACHEKEY extends CacheKey,
    *  full cache. It return's {@link CacheResult} with null
    *  and status as {@link CacheResult.CacheStatus#NOT_EXIST}.
    *
-   *  If cache clean up policy is {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
+   *  If cache cleanup policy is
+   *  {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
    *  table cache is partial cache. It return's {@link CacheResult} with
    *  null and status as MAY_EXIST.
    *

+ 6 - 4
hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdds.utils;
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -27,6 +28,7 @@ import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -68,13 +70,14 @@ public class TestMetadataStore {
   public ExpectedException expectedException = ExpectedException.none();
   private MetadataStore store;
   private File testDir;
+
   public TestMetadataStore(String metadataImpl) {
     this.storeImpl = metadataImpl;
   }
 
   @Parameters
   public static Collection<Object[]> data() {
-    return Arrays.asList(new Object[][]{
+    return Arrays.asList(new Object[][] {
         {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
         {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
     });
@@ -121,7 +124,8 @@ public class TestMetadataStore {
 
     //As database is empty, check whether iterator is working as expected or
     // not.
-    MetaStoreIterator< MetadataStore.KeyValue > metaStoreIterator = dbStore.iterator();
+    MetaStoreIterator<MetadataStore.KeyValue> metaStoreIterator =
+        dbStore.iterator();
     assertFalse(metaStoreIterator.hasNext());
     try {
       metaStoreIterator.next();
@@ -162,7 +166,6 @@ public class TestMetadataStore {
 
   }
 
-
   @Test
   public void testMetaStoreConfigDifferentFromType() throws IOException {
 
@@ -199,7 +202,6 @@ public class TestMetadataStore {
     GenericTestUtils.LogCapturer logCapturer =
         GenericTestUtils.LogCapturer.captureLogs(MetadataStoreBuilder.LOG);
 
-
     File dbDir = GenericTestUtils.getTestDir(getClass().getSimpleName()
         + "-" + storeImpl.toLowerCase() + "-test");
     MetadataStore dbStore = MetadataStoreBuilder.newBuilder().setConf(conf)

+ 2 - 1
hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/transport/server/ratis/ContainerStateMachine.java

@@ -448,7 +448,8 @@ public class ContainerStateMachine extends BaseStateMachine {
                 + write.getChunkData().getChunkName() + e);
             raftFuture.completeExceptionally(e);
             throw e;
-          }}, chunkExecutor);
+          }
+        }, chunkExecutor);
 
     writeChunkFutureMap.put(entryIndex, writeChunkFuture);
     LOG.debug(gid + ": writeChunk writeStateMachineData : blockId " +

+ 2 - 1
hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/web/utils/OzoneUtils.java

@@ -145,7 +145,8 @@ public final class OzoneUtils {
    *
    * @throws IllegalArgumentException
    */
-  public static void verifyResourceName(String resName) throws IllegalArgumentException {
+  public static void verifyResourceName(String resName)
+      throws IllegalArgumentException {
     HddsClientUtils.verifyResourceName(resName);
   }
 

+ 12 - 22
hadoop-ozone/ozonefs/src/main/java/org/apache/hadoop/fs/ozone/OzoneFsShell.java

@@ -17,28 +17,12 @@
  */
 package org.apache.hadoop.fs.ozone;
 
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedList;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.fs.shell.Command;
 import org.apache.hadoop.fs.shell.CommandFactory;
 import org.apache.hadoop.fs.shell.FsCommand;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.tools.TableListing;
-import org.apache.hadoop.tracing.TraceUtils;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.htrace.core.TraceScope;
-import org.apache.htrace.core.Tracer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Provide command line access to a Ozone FileSystem. */
 @InterfaceAudience.Private
@@ -51,14 +35,19 @@ public class OzoneFsShell extends FsShell {
    * {@link #setConf(Configuration)} with a valid configuration prior
    * to running commands.
    */
-  public OzoneFsShell() { this(null); }
+  public OzoneFsShell() {
+    this(null);
+  }
 
   /**
-   * Construct a OzoneFsShell with the given configuration.  Commands can be
-   * executed via {@link #run(String[])}
+   * Construct an OzoneFsShell with the given configuration.
+   *
+   * Commands can be executed via {@link #run(String[])}
    * @param conf the hadoop configuration
    */
-  public OzoneFsShell(Configuration conf) { super(conf); }
+  public OzoneFsShell(Configuration conf) {
+    super(conf);
+  }
 
   protected void registerCommands(CommandFactory factory) {
     // TODO: DFSAdmin subclasses FsShell so need to protect the command
@@ -75,11 +64,12 @@ public class OzoneFsShell extends FsShell {
   }
 
   /**
-   * main() has some simple utility methods
+   * Main entry point to execute fs commands.
+   *
    * @param argv the command and its arguments
    * @throws Exception upon error
    */
-  public static void main(String argv[]) throws Exception {
+  public static void main(String[] argv) throws Exception {
     OzoneFsShell shell = newShellInstance();
     Configuration conf = new Configuration();
     conf.setQuietMode(false);

+ 0 - 1
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OzoneClientKeyValidator.java

@@ -22,7 +22,6 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneClient;
 import org.apache.hadoop.ozone.client.OzoneClientFactory;
 

+ 0 - 1
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/S3KeyGenerator.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.ozone.freon;
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
-import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 
 import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
 import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;

+ 0 - 1
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/SameKeyReader.java

@@ -22,7 +22,6 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
-import org.apache.hadoop.ozone.client.OzoneBucket;
 import org.apache.hadoop.ozone.client.OzoneClient;
 import org.apache.hadoop.ozone.client.OzoneClientFactory;