
HADOOP-17009: Embrace Immutability of Java Collections

belugabehr committed 4 years ago
commit 100ec8e870
20 changed files with 55 additions and 65 deletions
  1. +1 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
  2. +2 -5  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
  3. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
  4. +6 -5  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
  5. +3 -7  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java
  6. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java
  7. +1 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java
  8. +2 -4  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
  9. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java
  10. +5 -6  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java
  11. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java
  12. +3 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
  13. +2 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
  14. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
  15. +5 -9  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
  16. +3 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java
  17. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java
  18. +2 -1  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
  19. +4 -4  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java
  20. +4 -4  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
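
Taken together, the changes follow one theme: where a collection is created only to be handed back empty, or with a single fixed entry, the immutable factories in java.util.Collections (emptyList, emptyMap, singleton, singletonMap) avoid a needless allocation; where an internal collection is exposed, it is copied and wrapped unmodifiable. A minimal, self-contained sketch of the core before/after idiom (class and method names are illustrative, not from the patch):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class EmptyListDemo {

      // Before: allocates a fresh mutable list on every call,
      // even though callers only ever read it.
      static List<String> tokensBefore() {
        return new ArrayList<String>(0);
      }

      // After: returns the shared, immutable empty list; no allocation.
      static List<String> tokensAfter() {
        return Collections.emptyList();
      }

      public static void main(String[] args) {
        System.out.println(tokensAfter().isEmpty()); // true
        // The trade-off: the shared instance rejects mutation.
        try {
          tokensAfter().add("t1");
        } catch (UnsupportedOperationException e) {
          System.out.println("immutable: " + e);
        }
      }
    }

The caveat in the comment is what to watch for in review: each call site below is safe only because no caller mutates the returned collection.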

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java

@@ -23,7 +23,6 @@ import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -1032,7 +1031,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-    return new ArrayList<Token<?>>(0);
+    return Collections.emptyList();
   }
 
   /**

+ 2 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java

@@ -20,8 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.BufferedReader;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.Collections;
 import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
 
@@ -65,9 +64,7 @@ public class Stat extends Shell {
     this.blockSize = blockSize;
     this.dereference = deref;
     // LANG = C setting
-    Map<String, String> env = new HashMap<String, String>();
-    env.put("LANG", "C");
-    setEnvironment(env);
+    setEnvironment(Collections.singletonMap("LANG", "C"));
   }
 
   public FileStatus getFileStatus() throws IOException {
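
The Stat constructor is the singleton-map variant of the same idea: Collections.singletonMap collapses the create/put/pass dance into one immutable, single-entry map. A quick sketch of the behavior this relies on:

    import java.util.Collections;
    import java.util.Map;

    public class SingletonMapDemo {
      public static void main(String[] args) {
        // One immutable entry: all a fixed LANG=C environment needs.
        Map<String, String> env = Collections.singletonMap("LANG", "C");
        System.out.println(env.get("LANG")); // C

        // Unlike the old HashMap, the map cannot be extended later.
        try {
          env.put("LC_ALL", "C");
        } catch (UnsupportedOperationException e) {
          System.out.println("immutable: " + e);
        }
      }
    }

This is safe here because Shell only reads the environment map when launching the process (see the Shell.java change at the end of this commit).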

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.shell;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -97,7 +98,7 @@ class Delete {
           throw e;
         }
         // prevent -f on a non-existent glob from failing
-        return new LinkedList<PathData>();
+        return Collections.emptyList();
       }
     }
 

+ 6 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java

@@ -19,9 +19,9 @@ package org.apache.hadoop.ha;
 
 import java.io.IOException;
 import java.io.PrintStream;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 
 import org.apache.commons.cli.Options;
@@ -107,8 +107,7 @@ public abstract class HAAdmin extends Configured implements Tool {
   protected abstract HAServiceTarget resolveTarget(String string);
 
   protected Collection<String> getTargetIds(String targetNodeToActivate) {
-    return new ArrayList<String>(
-        Arrays.asList(new String[]{targetNodeToActivate}));
+    return Collections.singleton(targetNodeToActivate);
   }
 
   protected String getUsageString() {
@@ -188,8 +187,10 @@ public abstract class HAAdmin extends Configured implements Tool {
   private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive)
       throws IOException  {
     Collection<String> targetIds = getTargetIds(targetNodeToActivate);
-    targetIds.remove(targetNodeToActivate);
-    for(String targetId : targetIds) {
+    for (String targetId : targetIds) {
+      if (targetNodeToActivate.equals(targetId)) {
+        continue;
+      }
       HAServiceTarget target = resolveTarget(targetId);
       if (!checkManualStateManagementOK(target)) {
         return true;
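
Note why the loop had to change together with getTargetIds(): Collections.singleton returns an immutable set, so the old targetIds.remove(targetNodeToActivate) would now throw UnsupportedOperationException. Filtering inside the loop preserves the behavior without mutating the collection. A small sketch of the pitfall (node names are illustrative):

    import java.util.Collection;
    import java.util.Collections;

    public class SingletonRemoveDemo {
      public static void main(String[] args) {
        Collection<String> targetIds = Collections.singleton("nn1");

        // What the old code effectively did; on an immutable
        // singleton, removing a present element throws.
        try {
          targetIds.remove("nn1");
        } catch (UnsupportedOperationException e) {
          System.out.println("cannot mutate a singleton: " + e);
        }

        // The patched approach: iterate and skip, no mutation needed.
        for (String targetId : targetIds) {
          if ("nn1".equals(targetId)) {
            continue;
          }
          System.out.println("would check " + targetId);
        }
      }
    }

Subclasses that override getTargetIds() with a larger, mutable collection keep working as well, since the caller no longer mutates whatever it receives.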

+ 3 - 7
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java

@@ -19,7 +19,7 @@ package org.apache.hadoop.http.lib;
 
 import java.io.IOException;
 import java.security.Principal;
-import java.util.HashMap;
+import java.util.Collections;
 
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -121,14 +121,10 @@ public class StaticUserWebFilter extends FilterInitializer {
 
   @Override
   public void initFilter(FilterContainer container, Configuration conf) {
-    HashMap<String, String> options = new HashMap<String, String>();
-
     String username = getUsernameFromConf(conf);
-    options.put(HADOOP_HTTP_STATIC_USER, username);
 
-    container.addFilter("static_user_filter",
-                        StaticUserFilter.class.getName(),
-                        options);
+    container.addFilter("static_user_filter", StaticUserFilter.class.getName(),
+        Collections.singletonMap(HADOOP_HTTP_STATIC_USER, username));
   }
 
   /**

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java

@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2.util;
 
 import java.lang.management.ManagementFactory;
-import java.util.HashMap;
+import java.util.Collections;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -70,8 +70,7 @@ public final class MBeans {
    */
   static public ObjectName register(String serviceName, String nameName,
                                     Object theMbean) {
-    return register(serviceName, nameName, new HashMap<String, String>(),
-        theMbean);
+    return register(serviceName, nameName, Collections.emptyMap(), theMbean);
   }
 
   /**

+ 1 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java

@@ -130,8 +130,7 @@ public class CachedDNSToSwitchMapping extends AbstractDNSToSwitchMapping {
    */
   @Override
   public Map<String, String> getSwitchMap() {
-    Map<String, String > switchMap = new HashMap<String, String>(cache);
-    return switchMap;
+    return new HashMap<>(cache);
   }
 
 

+ 2 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java

@@ -196,10 +196,8 @@ public class NetworkTopology {
         loc = loc.substring(1);
       }
       InnerNode rack = (InnerNode) clusterMap.getLoc(loc);
-      if (rack == null) {
-        return null;
-      }
-      return new ArrayList<Node>(rack.getChildren());
+      return (rack == null) ? new ArrayList<>(0)
+          : new ArrayList<>(rack.getChildren());
     } finally {
       netlock.readLock().unlock();
     }

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java

@@ -25,6 +25,7 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -130,7 +131,7 @@ public class TableMapping extends CachedDNSToSwitchMapping {
         if (map == null) {
           LOG.warn("Failed to read topology table. " +
             NetworkTopology.DEFAULT_RACK + " will be used for all nodes.");
-          map = new HashMap<String, String>();
+          map = Collections.emptyMap();
         }
       }
       List<String> results = new ArrayList<String>(names.size());

+ 5 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.security;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -69,8 +70,8 @@ public class CompositeGroupsMapping
   public synchronized List<String> getGroups(String user) throws IOException {
     Set<String> groupSet = new TreeSet<String>();
 
-    List<String> groups = null;
     for (GroupMappingServiceProvider provider : providersList) {
+      List<String> groups = Collections.emptyList();
       try {
         groups = provider.getGroups(user);
       } catch (Exception e) {
@@ -78,17 +79,15 @@ public class CompositeGroupsMapping
            user, provider.getClass().getSimpleName(), e.toString());
        LOG.debug("Stacktrace: ", e);
      }
-      if (groups != null && ! groups.isEmpty()) {
+      if (!groups.isEmpty()) {
        groupSet.addAll(groups);
        if (!combined) break;
      }
    }
 
-    List<String> results = new ArrayList<String>(groupSet.size());
-    results.addAll(groupSet);
-    return results;
+    return new ArrayList<>(groupSet);
   }
-  
+
   /**
    * Caches groups, no need to do that for this provider
    */
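
Seeding groups with Collections.emptyList() inside the loop, instead of a null initialized outside it, is what lets the null check disappear: an immutable empty list is a safe "no results" default that still answers isEmpty() and addAll() (LdapGroupsMapping below gets the same treatment). A compact sketch of the pattern, using a stand-in provider interface (hypothetical, for illustration):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Set;
    import java.util.TreeSet;

    public class EmptyOverNullDemo {

      // Stand-in for GroupMappingServiceProvider; illustrative only.
      interface Provider {
        List<String> getGroups(String user) throws Exception;
      }

      static List<String> getGroups(String user, List<Provider> providers) {
        Set<String> groupSet = new TreeSet<>();
        for (Provider provider : providers) {
          // Immutable empty default: no null checks downstream,
          // and a failing provider simply contributes nothing.
          List<String> groups = Collections.emptyList();
          try {
            groups = provider.getGroups(user);
          } catch (Exception e) {
            // fall through with the empty default
          }
          if (!groups.isEmpty()) {
            groupSet.addAll(groups);
          }
        }
        return new ArrayList<>(groupSet);
      }

      public static void main(String[] args) {
        Provider ok = user -> Collections.singletonList("admins");
        Provider failing = user -> { throw new Exception("unavailable"); };
        System.out.println(getGroups("alice", Arrays.asList(ok, failing)));
        // prints [admins]
      }
    }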

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.security;
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.LinkedList;
 
@@ -125,6 +126,6 @@ public class JniBasedUnixGroupsNetgroupMapping
     if (users != null && users.length != 0) {
       return Arrays.asList(users);
     }
-    return new LinkedList<String>();
+    return Collections.emptyList();
   }
 }

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java

@@ -518,11 +518,11 @@ public class LdapGroupsMapping
     if (!results.hasMoreElements()) {
       LOG.debug("doGetGroups({}) returned no groups because the " +
           "user is not found.", user);
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     SearchResult result = results.nextElement();
 
-    List<String> groups = null;
+    List<String> groups = Collections.emptyList();
     if (useOneQuery) {
       try {
         /**
@@ -548,7 +548,7 @@ public class LdapGroupsMapping
                 "the second LDAP query using the user's DN.", e);
       }
     }
-    if (groups == null || groups.isEmpty() || goUpHierarchy > 0) {
+    if (groups.isEmpty() || goUpHierarchy > 0) {
       groups = lookupGroup(result, c, goUpHierarchy);
     }
     LOG.debug("doGetGroups({}) returned {}", user, groups);

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.security;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -61,7+61,7 @@ public class NetgroupCache {
    * @return list of cached groups
    */
   public static List<String> getNetgroupNames() {
-    return new LinkedList<String>(getGroups());
+    return new ArrayList<>(getGroups());
   }
 
   private static Set<String> getGroups() {

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java

@@ -24,6 +24,7 @@ import java.io.InputStreamReader;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -534,7 +535,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
   static final class PassThroughMap<K> extends HashMap<K, K> {
 
     public PassThroughMap() {
-      this(new HashMap<K, K>());
+      this(Collections.emptyMap());
    }
 
     public PassThroughMap(Map<K, K> mapping) {

+ 5 - 9
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -20,9 +20,8 @@ package org.apache.hadoop.security.token.delegation.web;
 import java.io.IOException;
 import java.io.Writer;
 import java.text.MessageFormat;
-import java.util.HashMap;
+import java.util.Collections;
 import java.util.HashSet;
-import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -301,8 +300,7 @@ public abstract class DelegationTokenAuthenticationHandler
                   dt.decodeFromUrlString(tokenToRenew);
                   long expirationTime = tokenManager.renewToken(dt,
                       requestUgi.getShortUserName());
-                  map = new HashMap();
-                  map.put("long", expirationTime);
+                  map = Collections.singletonMap("long", expirationTime);
                 } catch (IOException ex) {
                   throw new AuthenticationException(ex.toString(), ex);
                 }
@@ -358,13 +356,11 @@ public abstract class DelegationTokenAuthenticationHandler
 
   @SuppressWarnings("unchecked")
   private static Map delegationTokenToJSON(Token token) throws IOException {
-    Map json = new LinkedHashMap();
-    json.put(
+    Map json = Collections.singletonMap(
         KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_URL_STRING_JSON,
         token.encodeToUrlString());
-    Map response = new LinkedHashMap();
-    response.put(KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_JSON,
-        json);
+    Map response = Collections.singletonMap(
+        KerberosDelegationTokenAuthenticator.DELEGATION_TOKEN_JSON, json);
     return response;
   }
 

+ 3 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.service;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -426,7 +427,7 @@ public abstract class AbstractService implements Service {
 
   @Override
   public synchronized List<LifecycleEvent> getLifecycleHistory() {
-    return new ArrayList<LifecycleEvent>(lifecycleHistory);
+    return Collections.unmodifiableList(new ArrayList<>(lifecycleHistory));
   }
 
   /**
@@ -483,8 +484,7 @@ public abstract class AbstractService implements Service {
   @Override
   public Map<String, String> getBlockers() {
     synchronized (blockerMap) {
-      Map<String, String> map = new HashMap<String, String>(blockerMap);
-      return map;
+      return Collections.unmodifiableMap(new HashMap<>(blockerMap));
     }
   }
 }
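
getLifecycleHistory() and getBlockers() now go a step beyond the defensive copy they already made: the copy is wrapped with Collections.unmodifiableList/unmodifiableMap, so callers get a stable snapshot they also cannot mutate (CompositeService.getServices() below gets the same treatment). A minimal sketch of the copy-then-wrap idiom:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class SnapshotDemo {
      private final List<String> history = new ArrayList<>();

      void record(String event) {
        history.add(event);
      }

      // Copy first (isolates callers from later internal changes),
      // then wrap (prevents callers from mutating the snapshot).
      List<String> getHistory() {
        return Collections.unmodifiableList(new ArrayList<>(history));
      }

      public static void main(String[] args) {
        SnapshotDemo demo = new SnapshotDemo();
        demo.record("INITED");
        List<String> snapshot = demo.getHistory();
        demo.record("STARTED");       // does not affect the snapshot
        System.out.println(snapshot); // [INITED]
        try {
          snapshot.add("STOPPED");    // the snapshot itself is read-only
        } catch (UnsupportedOperationException e) {
          System.out.println("read-only snapshot: " + e);
        }
      }
    }

Note this is a behavior change for any caller that previously mutated the returned copy; the unmodifiable wrapper turns such code into a runtime error.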

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.service;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -60,7 +61,7 @@ public class CompositeService extends AbstractService {
    */
   public List<Service> getServices() {
     synchronized (serviceList) {
-      return new ArrayList<Service>(serviceList);
+      return Collections.unmodifiableList(new ArrayList<>(serviceList));
     }
   }
 

+ 2 - 1
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java

@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -894,7 +895,7 @@ public class ServiceLauncher<S extends Service>
       List<String> args) {
     int size = args.size();
     if (size <= 1) {
-      return new ArrayList<>(0);
+      return Collections.emptyList();
     }
     List<String> coreArgs = args.subList(1, size);
 

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HttpExceptionUtils.java

@@ -28,6 +28,7 @@ import java.io.InputStream;
 import java.io.Writer;
 import java.lang.reflect.Constructor;
 import java.net.HttpURLConnection;
+import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
@@ -71,8 +72,8 @@ public class HttpExceptionUtils {
     json.put(ERROR_MESSAGE_JSON, getOneLineMessage(ex));
     json.put(ERROR_EXCEPTION_JSON, ex.getClass().getSimpleName());
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
-    Map<String, Object> jsonResponse = new LinkedHashMap<String, Object>();
-    jsonResponse.put(ERROR_JSON, json);
+    Map<String, Object> jsonResponse =
+        Collections.singletonMap(ERROR_JSON, json);
     Writer writer = response.getWriter();
     JsonSerialization.writer().writeValue(writer, jsonResponse);
     writer.flush();
@@ -91,8 +92,7 @@ public class HttpExceptionUtils {
     json.put(ERROR_MESSAGE_JSON, getOneLineMessage(ex));
     json.put(ERROR_EXCEPTION_JSON, ex.getClass().getSimpleName());
     json.put(ERROR_CLASSNAME_JSON, ex.getClass().getName());
-    Map<String, Object> response = new LinkedHashMap<String, Object>();
-    response.put(ERROR_JSON, json);
+    Map<String, Object> response = Collections.singletonMap(ERROR_JSON, json);
     return Response.status(status).type(MediaType.APPLICATION_JSON).
         entity(response).build();
   }

+ 4 - 4
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java

@@ -28,6 +28,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.Timer;
 import java.util.TimerTask;
@@ -871,6 +872,7 @@ public abstract class Shell {
     this.interval = interval;
     this.lastTime = (interval < 0) ? 0 : -interval;
     this.redirectErrorStream = redirectErrorStream;
+    this.environment = Collections.emptyMap();
   }
 
   /**
@@ -878,7 +880,7 @@ public abstract class Shell {
    * @param env Mapping of environment variables
    */
   protected void setEnvironment(Map<String, String> env) {
-    this.environment = env;
+    this.environment = Objects.requireNonNull(env);
   }
 
   /**
@@ -915,9 +917,7 @@ public abstract class Shell {
       builder.environment().clear();
     }
 
-    if (environment != null) {
-      builder.environment().putAll(this.environment);
-    }
+    builder.environment().putAll(this.environment);
 
     if (dir != null) {
       builder.directory(this.dir);
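
The Shell change pairs two idioms to kill the null check at the use site: the environment field starts as Collections.emptyMap() in the constructor, and setEnvironment() rejects null eagerly via Objects.requireNonNull, so the field can never be null when the ProcessBuilder is populated. A condensed sketch of the invariant (class name is illustrative):

    import java.util.Collections;
    import java.util.Map;
    import java.util.Objects;

    public class NonNullFieldDemo {
      // Invariant: never null; the empty map is a harmless default.
      private Map<String, String> environment = Collections.emptyMap();

      void setEnvironment(Map<String, String> env) {
        // Fail fast at assignment instead of null-checking every use.
        this.environment = Objects.requireNonNull(env);
      }

      void populate(ProcessBuilder builder) {
        // No guard needed: putAll on an empty map is a no-op.
        builder.environment().putAll(environment);
      }

      public static void main(String[] args) {
        NonNullFieldDemo demo = new NonNullFieldDemo();
        demo.populate(new ProcessBuilder("true")); // empty default works
        try {
          demo.setEnvironment(null);
        } catch (NullPointerException e) {
          System.out.println("rejected eagerly: " + e);
        }
      }
    }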