
HADOOP-18266. Using HashSet/TreeSet Constructor for hadoop-common (#4365)

* HADOOP-18266. Using HashSet/TreeSet Constructor for hadoop-common

Co-authored-by: Deb <dbsamrat@3c22fba1b03f.ant.amazon.com>
Samrat 2 years ago
parent
commit
477b67a335
37 changed files with 69 additions and 69 deletions
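All 37 files follow the same mechanical refactoring: constructors that spell out the element type, or that go through Hadoop's internal org.apache.hadoop.util.Sets helper, become plain JDK constructors with the diamond operator. A minimal, self-contained sketch of the three recurring patterns (the class name and values below are illustrative, not taken from the patch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

public class DiamondDemo {
  public static void main(String[] args) {
    // Pattern 1: explicit type argument on the constructor vs. the
    // diamond operator (Java 7+), which infers it from the declaration.
    Set<String> explicit = new HashSet<String>();   // before
    Set<String> inferred = new HashSet<>();         // after

    // Pattern 2: Sets.newHashSet("k1", "k2", "k3") replaced by a JDK-only form.
    Set<String> fromValues = new HashSet<>(Arrays.asList("k1", "k2", "k3"));

    // Pattern 3: Sets.newTreeSet(collection) replaced the same way.
    Set<String> sorted = new TreeSet<>(Arrays.asList("k2", "k3", "k1"));

    System.out.println(explicit.equals(inferred));  // true: both are empty sets
    System.out.println(fromValues.size());          // 3
    System.out.println(sorted);                     // [k1, k2, k3]
  }
}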
  1. +1 -1
      hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
  2. +2 -2
      hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java
  3. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  4. +1 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
  5. +2 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java
  6. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java
  7. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
  8. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
  9. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
  10. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java
  11. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
  12. +2 -3
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java
  13. +2 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java
  14. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
  15. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
  16. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
  17. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
  18. +2 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
  19. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
  20. +2 -2
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
  21. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
  22. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
  23. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
  24. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
  25. +1 -1
      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java
  26. +3 -3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
  27. +5 -4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java
  28. +12 -11
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java
  29. +4 -4
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java
  30. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
  31. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
  32. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
  33. +1 -1
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
  34. +2 -2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java
  35. +3 -3
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
  36. +2 -2
      hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java
  37. +3 -3
      hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java

+ 1 - 1
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java

@@ -236,7 +236,7 @@ public class KerberosUtil {
    */
   static final String[] getPrincipalNames(String keytabFileName) throws IOException {
     Keytab keytab = Keytab.loadKeytab(new File(keytabFileName));
-    Set<String> principals = new HashSet<String>();
+    Set<String> principals = new HashSet<>();
     List<PrincipalName> entries = keytab.getPrincipals();
     for (PrincipalName entry : entries) {
       principals.add(entry.getName().replace("\\", "/"));

+ 2 - 2
hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/KerberosTestUtils.java

@@ -108,9 +108,9 @@ public class KerberosTestUtils {
   public static <T> T doAs(String principal, final Callable<T> callable) throws Exception {
     LoginContext loginContext = null;
     try {
-      Set<Principal> principals = new HashSet<Principal>();
+      Set<Principal> principals = new HashSet<>();
       principals.add(new KerberosPrincipal(KerberosTestUtils.getClientPrincipal()));
-      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
+      Subject subject = new Subject(false, principals, new HashSet<>(), new HashSet<>());
       loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal));
       loginContext.login();
       subject = loginContext.getSubject();

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -774,7 +774,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private void handleDeprecation() {
     LOG.debug("Handling deprecation for all properties in config...");
     DeprecationContext deprecations = deprecationContext.get();
-    Set<Object> keys = new HashSet<Object>();
+    Set<Object> keys = new HashSet<>();
     keys.addAll(getProps().keySet());
     for (Object item: keys) {
       LOG.debug("Handling deprecation for " + (String)item);

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java

@@ -2372,8 +2372,7 @@ public class FileContext implements PathCapabilities {
   Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
       throws IOException {
     final Path absF = fixRelativePart(f);
-    final HashSet<AbstractFileSystem> result 
-      = new HashSet<AbstractFileSystem>();
+    final HashSet<AbstractFileSystem> result = new HashSet<>();
     new FSLinkResolver<Void>() {
       @Override
       public Void next(final AbstractFileSystem fs, final Path p)

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java

@@ -76,7 +76,7 @@ class SFTPConnectionPool {
     ConnectionInfo info = con2infoMap.get(channel);
     HashSet<ChannelSftp> cons = idleConnections.get(info);
     if (cons == null) {
-      cons = new HashSet<ChannelSftp>();
+      cons = new HashSet<>();
       idleConnections.put(info, cons);
     }
     cons.add(channel);
@@ -94,7 +94,7 @@ class SFTPConnectionPool {
     Set<ChannelSftp> cons = con2infoMap.keySet();
     if (cons != null && cons.size() > 0) {
       // make a copy since we need to modify the underlying Map
-      Set<ChannelSftp> copy = new HashSet<ChannelSftp>(cons);
+      Set<ChannelSftp> copy = new HashSet<>(cons);
       // Initiate disconnect from all outstanding connections
       for (ChannelSftp con : copy) {
         try {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandFormat.java

@@ -165,7 +165,7 @@ public class CommandFormat {
    * @return Set{@literal <}String{@literal >} of the enabled options
    */
   public Set<String> getOpts() {
-    Set<String> optSet = new HashSet<String>();
+    Set<String> optSet = new HashSet<>();
     for (Map.Entry<String, Boolean> entry : options.entrySet()) {
       if (entry.getValue()) {
         optSet.add(entry.getKey());

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java

@@ -96,7 +96,7 @@ public class Find extends FsCommand {
   private Expression rootExpression;
 
   /** Set of path items returning a {@link Result#STOP} result. */
-  private HashSet<Path> stopPaths = new HashSet<Path>();
+  private HashSet<Path> stopPaths = new HashSet<>();
 
   /** Register the expressions with the expression factory. */
   private static void registerExpressions(ExpressionFactory factory) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java

@@ -1037,7 +1037,7 @@ public class ViewFileSystem extends FileSystem {
     List<InodeTree.MountPoint<FileSystem>> mountPoints =
         fsState.getMountPoints();
     Map<String, FileSystem> fsMap = initializeMountedFileSystems(mountPoints);
-    Set<FileSystem> children = new HashSet<FileSystem>();
+    Set<FileSystem> children = new HashSet<>();
     for (InodeTree.MountPoint<FileSystem> mountPoint : mountPoints) {
       FileSystem targetFs = fsMap.get(mountPoint.src);
       children.addAll(Arrays.asList(targetFs.getChildFileSystems()));

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java

@@ -109,7 +109,7 @@ public class CodecPool {
       synchronized (pool) {
         codecSet = pool.get(codecClass);
         if (codecSet == null) {
-          codecSet = new HashSet<T>();
+          codecSet = new HashSet<>();
           pool.put(codecClass, codecSet);
         }
       }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java

@@ -64,7 +64,7 @@ public class AvroReflectSerialization extends AvroSerialization<Object>{
 
   private void getPackages() {
     String[] pkgList  = getConf().getStrings(AVRO_REFLECT_PACKAGES);
-    packages = new HashSet<String>();
+    packages = new HashSet<>();
     if (pkgList != null) {
       for (String pkg : pkgList) {
         packages.add(pkg.trim());

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java

@@ -69,7 +69,7 @@ public class ProtocolProxy<T> {
     }
     int[] serverMethodsCodes = serverInfo.getMethods();
     if (serverMethodsCodes != null) {
-      serverMethods = new HashSet<Integer>(serverMethodsCodes.length);
+      serverMethods = new HashSet<>(serverMethodsCodes.length);
       for (int m : serverMethodsCodes) {
         this.serverMethods.add(Integer.valueOf(m));
       }

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java

@@ -19,11 +19,10 @@
 package org.apache.hadoop.metrics2.lib;
 
 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Set;
 
 import static org.apache.hadoop.util.Preconditions.*;
-import org.apache.hadoop.util.Sets;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@@ -44,7 +43,7 @@ import org.slf4j.LoggerFactory;
 public class MutableRates extends MutableMetric {
   static final Logger LOG = LoggerFactory.getLogger(MutableRates.class);
   private final MetricsRegistry registry;
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();
 
   MutableRates(MetricsRegistry registry) {
     this.registry = checkNotNull(registry, "metrics registry");

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java

@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.metrics2.lib;
 
-import org.apache.hadoop.util.Sets;
 import java.lang.ref.WeakReference;
 import java.lang.reflect.Method;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
@@ -52,7 +52,7 @@ public class MutableRatesWithAggregation extends MutableMetric {
       LoggerFactory.getLogger(MutableRatesWithAggregation.class);
   private final Map<String, MutableRate> globalMetrics =
       new ConcurrentHashMap<>();
-  private final Set<Class<?>> protocolCache = Sets.newHashSet();
+  private final Set<Class<?>> protocolCache = new HashSet<>();
 
   private final ConcurrentLinkedDeque<WeakReference<ConcurrentMap<String, ThreadSafeSampleStat>>>
       weakReferenceQueue = new ConcurrentLinkedDeque<>();

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java

@@ -115,7 +115,7 @@ public abstract class AbstractDNSToSwitchMapping
     builder.append("Mapping: ").append(toString()).append("\n");
     if (rack != null) {
       builder.append("Map:\n");
-      Set<String> switches = new HashSet<String>();
+      Set<String> switches = new HashSet<>();
       for (Map.Entry<String, String> entry : rack.entrySet()) {
         builder.append("  ")
             .append(entry.getKey())

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java

@@ -1086,7 +1086,7 @@ public class NetworkTopology {
     String rackname = node.getNetworkLocation();
     Set<String> nodes = rackMap.get(rackname);
     if (nodes == null) {
-      nodes = new HashSet<String>();
+      nodes = new HashSet<>();
     }
     if (!decommissionNodes.contains(node.getName())) {
       nodes.add(node.getName());

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java

@@ -109,7 +109,7 @@ public class CompositeGroupsMapping
 
   @Override
   public synchronized Set<String> getGroupsSet(String user) throws IOException {
-    Set<String> groupSet = new HashSet<String>();
+    Set<String> groupSet = new HashSet<>();
 
     Set<String> groups = null;
     for (GroupMappingServiceProvider provider : providersList) {

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java

@@ -65,7 +65,7 @@ public class NetgroupCache {
   }
 
   private static Set<String> getGroups() {
-    Set<String> allGroups = new HashSet<String> ();
+    Set<String> allGroups = new HashSet<>();
     for (Set<String> userGroups : userToNetgroupsMap.values()) {
       allGroups.addAll(userGroups);
     }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java

@@ -105,8 +105,8 @@ public class AccessControlList implements Writable {
    * @param userGroupStrings build ACL from array of Strings
    */
   private void buildACL(String[] userGroupStrings) {
-    users = new HashSet<String>();
-    groups = new HashSet<String>();
+    users = new HashSet<>();
+    groups = new HashSet<>();
     for (String aclPart : userGroupStrings) {
       if (aclPart != null && isWildCardACLValue(aclPart)) {
         allAllowed = true;

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java

@@ -33,7 +33,7 @@ public class ProxyServers {
   }
 
   public static void refresh(Configuration conf){
-    Collection<String> tempServers = new HashSet<String>();
+    Collection<String> tempServers = new HashSet<>();
     // trusted proxy servers such as http proxies
     for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
       InetSocketAddress addr = new InetSocketAddress(host, 0);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java

@@ -94,7 +94,7 @@ public class RestCsrfPreventionFilter implements Filter {
 
   void parseBrowserUserAgents(String userAgents) {
     String[] agentsArray =  userAgents.split(",");
-    browserUserAgents = new HashSet<Pattern>();
+    browserUserAgents = new HashSet<>();
     for (String patternString : agentsArray) {
       browserUserAgents.add(Pattern.compile(patternString));
     }
@@ -102,7 +102,7 @@ public class RestCsrfPreventionFilter implements Filter {
 
   void parseMethodsToIgnore(String mti) {
     String[] methods = mti.split(",");
-    methodsToIgnore = new HashSet<String>();
+    methodsToIgnore = new HashSet<>();
     for (int i = 0; i < methods.length; i++) {
       methodsToIgnore.add(methods[i]);
     }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java

@@ -716,7 +716,7 @@ extends AbstractDelegationTokenIdentifier>
   /** Remove expired delegation tokens from cache */
   private void removeExpiredToken() throws IOException {
     long now = Time.now();
-    Set<TokenIdent> expiredTokens = new HashSet<TokenIdent>();
+    Set<TokenIdent> expiredTokens = new HashSet<>();
     synchronized (this) {
       Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
           currentTokens.entrySet().iterator();

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -89,7 +89,7 @@ public abstract class DelegationTokenAuthenticationHandler
 
   public static final String TOKEN_KIND = PREFIX + "token-kind";
 
-  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<String>();
+  private static final Set<String> DELEGATION_TOKEN_OPS = new HashSet<>();
 
   public static final String DELEGATION_TOKEN_UGI_ATTRIBUTE =
       "hadoop.security.delegation-token.ugi";

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java

@@ -58,7 +58,7 @@ public class FileBasedIPList implements IPList {
       lines = null;
     }
     if (lines != null) {
-      addressList = new MachineList(new HashSet<String>(Arrays.asList(lines)));
+      addressList = new MachineList(new HashSet<>(Arrays.asList(lines)));
     } else {
       addressList = null;
     }

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java

@@ -135,7 +135,7 @@ public class HostsFileReader {
     if (xmlInput) {
       readXmlFileToMapWithFileInputStream(type, filename, inputStream, map);
     } else {
-      HashSet<String> nodes = new HashSet<String>();
+      HashSet<String> nodes = new HashSet<>();
       readFileToSetWithFileInputStream(type, filename, inputStream, nodes);
       for (String node : nodes) {
         map.put(node, null);

+ 1 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java

@@ -249,7 +249,7 @@ public final class ShutdownHookManager {
   }
 
   private final Set<HookEntry> hooks =
-      Collections.synchronizedSet(new HashSet<HookEntry>());
+      Collections.synchronizedSet(new HashSet<>());
 
   private AtomicBoolean shutdownInProgress = new AtomicBoolean(false);
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java

@@ -80,9 +80,9 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
         };
 
     // Initialize used variables
-    xmlPropsToSkipCompare = new HashSet<String>();
-    xmlPrefixToSkipCompare = new HashSet<String>();
-    configurationPropsToSkipCompare = new HashSet<String>();
+    xmlPropsToSkipCompare = new HashSet<>();
+    xmlPrefixToSkipCompare = new HashSet<>();
+    configurationPropsToSkipCompare = new HashSet<>();
 
     // Set error modes
     errorIfMissingConfigProps = true;

+ 5 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestValueQueue.java

@@ -18,6 +18,8 @@
 package org.apache.hadoop.crypto.key;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Queue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeoutException;
@@ -32,7 +34,6 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
-import org.apache.hadoop.util.Sets;
 
 public class TestValueQueue {
   Logger LOG = LoggerFactory.getLogger(TestValueQueue.class);
@@ -103,10 +104,10 @@ public class TestValueQueue {
     Assert.assertEquals(5, fillInfos[0].num);
     Assert.assertEquals(5, fillInfos[1].num);
     Assert.assertEquals(5, fillInfos[2].num);
-    Assert.assertEquals(Sets.newHashSet("k1", "k2", "k3"),
-        Sets.newHashSet(fillInfos[0].key,
+    Assert.assertEquals(new HashSet<>(Arrays.asList("k1", "k2", "k3")),
+        new HashSet<>(Arrays.asList(fillInfos[0].key,
             fillInfos[1].key,
-            fillInfos[2].key));
+            fillInfos[2].key)));
     vq.shutdown();
   }
 

+ 12 - 11
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/kms/TestLoadBalancingKMSClientProvider.java

@@ -39,6 +39,8 @@ import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
@@ -65,7 +67,6 @@ import org.junit.Test;
 import org.junit.rules.Timeout;
 import org.mockito.Mockito;
 
-import org.apache.hadoop.util.Sets;
 
 public class TestLoadBalancingKMSClientProvider {
 
@@ -86,8 +87,8 @@ public class TestLoadBalancingKMSClientProvider {
     KMSClientProvider[] providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(1, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl()));
+    assertEquals(new HashSet<>(Collections.singleton("http://host1:9600/kms/foo/v1/")),
+        new HashSet<>(Collections.singleton(providers[0].getKMSUrl())));
 
     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -95,12 +96,12 @@ public class TestLoadBalancingKMSClientProvider {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
         "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
             providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+            providers[2].getKMSUrl())));
 
     kp = new KMSClientProvider.Factory().createProvider(new URI(
         "kms://http@host1;host2;host3:9600/kms/foo"), conf);
@@ -108,12 +109,12 @@ public class TestLoadBalancingKMSClientProvider {
     providers =
         ((LoadBalancingKMSClientProvider) kp).getProviders();
     assertEquals(3, providers.length);
-    assertEquals(Sets.newHashSet("http://host1:9600/kms/foo/v1/",
+    assertEquals(new HashSet<>(Arrays.asList("http://host1:9600/kms/foo/v1/",
         "http://host2:9600/kms/foo/v1/",
-        "http://host3:9600/kms/foo/v1/"),
-        Sets.newHashSet(providers[0].getKMSUrl(),
+        "http://host3:9600/kms/foo/v1/")),
+        new HashSet<>(Arrays.asList(providers[0].getKMSUrl(),
             providers[1].getKMSUrl(),
-            providers[2].getKMSUrl()));
+            providers[2].getKMSUrl())));
   }
 
   @Test

+ 4 - 4
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java

@@ -43,9 +43,9 @@ public class TestCommandFormat {
   
   @Before
   public void setUp() {
-    args = new ArrayList<String>();
-    expectedOpts = new HashSet<String>();
-    expectedArgs = new ArrayList<String>();
+    args = new ArrayList<>();
+    expectedOpts = new HashSet<>();
+    expectedArgs = new ArrayList<>();
   }
 
   @Test
@@ -205,6 +205,6 @@ public class TestCommandFormat {
   }
   
   private static Set<String> setOf(String ... objects) {
-    return new HashSet<String>(listOf(objects));
+    return new HashSet<>(listOf(objects));
   }
 }

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java

@@ -246,7 +246,7 @@ public class TestHarFileSystemBasics {
     // test.har has the following contents:
     //   dir1/1.txt
     //   dir1/2.txt
-    Set<String> expectedFileNames = new HashSet<String>();
+    Set<String> expectedFileNames = new HashSet<>();
     expectedFileNames.add("1.txt");
     expectedFileNames.add("2.txt");
 

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java

@@ -152,7 +152,7 @@ public class TestListFiles {
     writeFile(fs, FILE1, FILE_LEN);
     writeFile(fs, FILE3, FILE_LEN);
 
-    Set<Path> filesToFind = new HashSet<Path>();
+    Set<Path> filesToFind = new HashSet<>();
     filesToFind.add(fs.makeQualified(FILE1));
     filesToFind.add(fs.makeQualified(FILE2));
     filesToFind.add(fs.makeQualified(FILE3));

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java

@@ -747,7 +747,7 @@ public class TestTrash {
     Path myPath = new Path(TEST_DIR, "test/mkdirs");
     mkdir(fs, myPath);
     int fileIndex = 0;
-    Set<String> checkpoints = new HashSet<String>();
+    Set<String> checkpoints = new HashSet<>();
     while (true)  {
       // Create a file with a new name
       Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);

+ 1 - 1
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java

@@ -275,7 +275,7 @@ public class TestIOUtils {
     File dir = new File("testListDirectory");
     Files.createDirectory(dir.toPath());
     try {
-      Set<String> entries = new HashSet<String>();
+      Set<String> entries = new HashSet<>();
       entries.add("entry1");
       entries.add("entry2");
       entries.add("entry3");

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java

@@ -69,7 +69,7 @@ public class TestCodecPool {
     Compressor comp = CodecPool.getCompressor(codec);
     CodecPool.returnCompressor(comp);
     CodecPool.returnCompressor(comp);
-    Set<Compressor> compressors = new HashSet<Compressor>();
+    Set<Compressor> compressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       compressors.add(CodecPool.getCompressor(codec));
     }
@@ -180,7 +180,7 @@ public class TestCodecPool {
     Decompressor decomp = CodecPool.getDecompressor(codec);
     CodecPool.returnDecompressor(decomp);
     CodecPool.returnDecompressor(decomp);
-    Set<Decompressor> decompressors = new HashSet<Decompressor>();
+    Set<Decompressor> decompressors = new HashSet<>();
     for (int i = 0; i < 10; ++i) {
       decompressors.add(CodecPool.getDecompressor(codec));
     }

+ 3 - 3
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java

@@ -39,6 +39,7 @@ import java.util.Objects;
 import java.util.Random;
 import java.util.Set;
 import java.util.Enumeration;
+import java.util.TreeSet;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -72,7 +73,6 @@ import org.mockito.stubbing.Answer;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.util.Sets;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.util.functional.CommonCallableSupplier.submit;
@@ -344,13 +344,13 @@ public abstract class GenericTestUtils {
   public static void assertGlobEquals(File dir, String pattern,
       String ... expectedMatches) throws IOException {
 
-    Set<String> found = Sets.newTreeSet();
+    Set<String> found = new TreeSet<>();
     for (File f : FileUtil.listFiles(dir)) {
       if (f.getName().matches(pattern)) {
         found.add(f.getName());
       }
     }
-    Set<String> expectedSet = Sets.newTreeSet(
+    Set<String> expectedSet = new TreeSet<>(
         Arrays.asList(expectedMatches));
     Assert.assertEquals("Bad files matching " + pattern + " in " + dir,
         Joiner.on(",").join(expectedSet),

+ 2 - 2
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java

@@ -70,8 +70,8 @@ public abstract class MultithreadedTestUtil {
   public static class TestContext {
     private Throwable err = null;
     private boolean stopped = false;
-    private Set<TestingThread> testThreads = new HashSet<TestingThread>();
-    private Set<TestingThread> finishedThreads = new HashSet<TestingThread>();
+    private Set<TestingThread> testThreads = new HashSet<>();
+    private Set<TestingThread> finishedThreads = new HashSet<>();
 
     /**
      * Check if the context can run threads.

+ 3 - 3
hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java

@@ -36,9 +36,9 @@ import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalListener;
 import org.apache.hadoop.thirdparty.com.google.common.cache.RemovalNotification;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -56,10 +56,10 @@ import java.util.Set;
  */
 public class KMSAudit {
   @VisibleForTesting
-  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = Sets.newHashSet(
+  static final Set<KMS.KMSOp> AGGREGATE_OPS_WHITELIST = new HashSet<>(Arrays.asList(
       KMS.KMSOp.GET_KEY_VERSION, KMS.KMSOp.GET_CURRENT_KEY,
       KMS.KMSOp.DECRYPT_EEK, KMS.KMSOp.GENERATE_EEK, KMS.KMSOp.REENCRYPT_EEK
-  );
+  ));
 
   private Cache<String, AuditEvent> cache;
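
One semantic note on the last hunk: Sets.newHashSet(...) returned a mutable HashSet, and new HashSet<>(Arrays.asList(...)) does too, so AGGREGATE_OPS_WHITELIST keeps its old behavior. The terser Set.of(...) would also compile on Java 9+, but it returns an unmodifiable set, and presumably was avoided because Hadoop still targeted Java 8 at the time. A small illustrative sketch of the difference (names and values here are hypothetical, not from the patch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetMutabilityDemo {
  public static void main(String[] args) {
    // The form used in the patch: a mutable HashSet seeded from fixed values.
    Set<String> mutable = new HashSet<>(Arrays.asList("DECRYPT_EEK", "GENERATE_EEK"));
    mutable.add("REENCRYPT_EEK");        // allowed: HashSet stays mutable

    // Java 9+ alternative: unmodifiable, so any add() throws at runtime.
    Set<String> frozen = Set.of("DECRYPT_EEK", "GENERATE_EEK");
    try {
      frozen.add("REENCRYPT_EEK");
    } catch (UnsupportedOperationException e) {
      System.out.println("Set.of(...) sets reject modification");
    }
  }
}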