HADOOP-19231. Add JacksonUtil to manage Jackson classes (#6953)

New class org.apache.hadoop.util.JacksonUtil centralizes construction of
Jackson ObjectMappers and JsonFactories.

Contributed by PJ Fanning
PJ Fanning 10 months ago
parent commit fa9bb0d1ac
71 changed files with 392 additions and 296 deletions
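
Nearly every file below follows the same mechanical change: a per-call or per-class new ObjectMapper() / new JsonFactory() is replaced with a shared, pre-built instance from JacksonUtil. As a minimal before/after sketch of the pattern (the class and method names here are illustrative, not taken from the patch):

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import org.apache.hadoop.util.JacksonUtil;

    import java.io.IOException;
    import java.io.StringWriter;

    // Illustrative sketch of the migration; not part of HADOOP-19231 itself.
    public class MigrationSketch {
      // Before: private static final ObjectWriter WRITER = new ObjectMapper().writer();
      // After: reuse the writer built once inside JacksonUtil.
      private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();

      static String dump(Object value) throws IOException {
        return WRITER.writeValueAsString(value);
      }

      static String stream() throws IOException {
        StringWriter out = new StringWriter();
        // ObjectWriter.createGenerator (available since Jackson 2.11) stands in
        // for the old new JsonFactory().createGenerator(out) at streaming call sites.
        try (JsonGenerator gen = WRITER.createGenerator(out)) {
          gen.writeStartObject();
          gen.writeStringField("status", "ok");
          gen.writeEndObject();
        }
        return out.toString();
      }
    }
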
  1. +3 -5  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  2. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
  3. +2 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
  4. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
  5. +2 -8  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
  6. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
  7. +3 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
  8. +123 -0  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java
  9. +3 -6  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
  10. +2 -4  hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java
  11. +3 -4  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
  12. +6 -6  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
  13. +5 -9  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
  14. +2 -4  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
  15. +2 -2  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
  16. +2 -1  hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  17. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java
  18. +3 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
  19. +4 -5  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
  20. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
  21. +2 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
  22. +3 -4  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
  23. +3 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
  24. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
  25. +3 -5  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java
  26. +2 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java
  27. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
  28. +12 -12  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
  29. +2 -3  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
  30. +2 -1  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
  31. +3 -3  hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java
  32. +2 -2  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
  33. +4 -4  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java
  34. +5 -5  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java
  35. +3 -5  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
  36. +3 -4  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
  37. +3 -2  hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java
  38. +4 -7  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
  39. +5 -4  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
  40. +3 -2  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
  41. +5 -6  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
  42. +1 -3  hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
  43. +3 -7  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java
  44. +4 -4  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
  45. +3 -1  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
  46. +5 -7  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
  47. +3 -2  hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java
  48. +26 -28  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java
  49. +19 -15  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java
  50. +2 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java
  51. +3 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
  52. +14 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
  53. +4 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
  54. +2 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
  55. +4 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
  56. +5 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
  57. +2 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java
  58. +3 -9  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java
  59. +2 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
  60. +2 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java
  61. +3 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
  62. +3 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
  63. +2 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
  64. +3 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java
  65. +9 -6  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java
  66. +3 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
  67. +2 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java
  68. +2 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
  69. +4 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java
  70. +2 -1  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java
  71. +4 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java

+ 3 - 5
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -22,7 +22,6 @@ import com.ctc.wstx.api.ReaderConfig;
 import com.ctc.wstx.io.StreamBootstrapper;
 import com.ctc.wstx.io.SystemId;
 import com.ctc.wstx.stax.WstxInputFactory;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
 import java.io.BufferedInputStream;
@@ -101,6 +100,7 @@ import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.util.ConfigurationHelper;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringInterner;
@@ -3792,8 +3792,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       throw new IllegalArgumentException("Property " +
           propertyName + " not found");
     } else {
-      JsonFactory dumpFactory = new JsonFactory();
-      JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+      JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
       dumpGenerator.writeStartObject();
       dumpGenerator.writeFieldName("property");
       appendJSONProperty(dumpGenerator, config, propertyName,
@@ -3831,8 +3830,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
     dumpGenerator.writeStartObject();
     dumpGenerator.writeFieldName("properties");
     dumpGenerator.writeStartArray();

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
@@ -78,7 +79,6 @@ import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -592,11 +592,10 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
         && conn.getContentType().trim().toLowerCase()
             .startsWith(APPLICATION_JSON_MIME)
         && klass != null) {
-      ObjectMapper mapper = new ObjectMapper();
       InputStream is = null;
       try {
         is = conn.getInputStream();
-        ret = mapper.readValue(is, klass);
+        ret = JacksonUtil.getSharedReader().readValue(is, klass);
       } finally {
         IOUtils.closeStream(is);
       }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java

@@ -38,10 +38,10 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import javax.management.ObjectName;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call cost and

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -121,6 +121,7 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
@@ -130,7 +131,6 @@ import org.apache.hadoop.tracing.SpanContext;
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
 import org.apache.hadoop.tracing.TraceUtils;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -3843,9 +3843,8 @@ public abstract class Server {
    * @return Get the NumOpenConnections/User.
    */
   public String getNumOpenConnectionsPerUser() {
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper
+      return JacksonUtil.getSharedWriter()
           .writeValueAsString(connectionManager.getUserToConnectionsMap());
     } catch (IOException ignored) {
     }

+ 2 - 8
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java

@@ -43,13 +43,13 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.util.JacksonUtil;
 
 /*
  * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@@ -134,11 +134,6 @@ public class JMXJsonServlet extends HttpServlet {
    */
   protected transient MBeanServer mBeanServer;
 
-  /**
-   * Json Factory to create Json generators for write objects in json format
-   */
-  protected transient JsonFactory jsonFactory;
-
   /**
    * Initialize this servlet.
    */
@@ -146,7 +141,6 @@ public class JMXJsonServlet extends HttpServlet {
   public void init() throws ServletException {
     // Retrieve the MBean server
     mBeanServer = ManagementFactory.getPlatformMBeanServer();
-    jsonFactory = new JsonFactory();
   }
 
   protected boolean isInstrumentationAccessAllowed(HttpServletRequest request, 
@@ -187,7 +181,7 @@ public class JMXJsonServlet extends HttpServlet {
         response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
         response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
 
-        jg = jsonFactory.createGenerator(writer);
+        jg = JacksonUtil.getSharedWriter().createGenerator(writer);
         jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
         jg.useDefaultPrettyPrinter();
         jg.writeStartObject();

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.metrics2;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.JacksonUtil;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,8 +46,7 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   /**
    * Build an instance.

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -46,6 +46,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -165,7 +166,7 @@ public abstract class DelegationTokenAuthenticationHandler
   @VisibleForTesting
   public void initJsonFactory(Properties config) {
     boolean hasFeature = false;
-    JsonFactory tmpJsonFactory = new JsonFactory();
+    JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory();
 
     for (Map.Entry entry : config.entrySet()) {
       String key = (String)entry.getKey();
@@ -335,7 +336,7 @@ public abstract class DelegationTokenAuthenticationHandler
             if (map != null) {
               response.setContentType(MediaType.APPLICATION_JSON);
               Writer writer = response.getWriter();
-              ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
+              ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory);
               jsonMapper.writeValue(writer, map);
               writer.write(ENTER);
               writer.flush();

+ 123 - 0
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java

@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+
+/**
+ * Utility for sharing code related to Jackson usage in Hadoop.
+ */
+@Private
+public final class JacksonUtil {
+
+  private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper();
+  private static final ObjectReader SHARED_BASIC_OBJECT_READER =
+      SHARED_BASIC_OBJECT_MAPPER.reader();
+  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER =
+      SHARED_BASIC_OBJECT_MAPPER.writer();
+  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY =
+      SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter();
+
+  /**
+   * Creates a new {@link JsonFactory} instance with basic configuration.
+   *
+   * @return an {@link JsonFactory} with basic configuration
+   */
+  public static JsonFactory createBasicJsonFactory() {
+    // deliberately return a new instance instead of sharing one because we can't trust
+    // that users won't modify this instance
+    return new JsonFactory();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance with basic configuration.
+   *
+   * @return an {@link ObjectMapper} with basic configuration
+   */
+  public static ObjectMapper createBasicObjectMapper() {
+    // deliberately return a new instance instead of sharing one because we can't trust
+    // that users won't modify this instance
+    return JsonMapper.builder(createBasicJsonFactory()).build();
+  }
+
+  /**
+   * Creates a new {@link ObjectMapper} instance based on the configuration
+   * in the input {@link JsonFactory}.
+   *
+   * @param jsonFactory a pre-configured {@link JsonFactory}
+   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
+   */
+  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
+    return JsonMapper.builder(jsonFactory).build();
+  }
+
+  /**
+   * Returns a shared {@link ObjectReader} instance with basic configuration.
+   *
+   * @return a shared {@link ObjectReader} instance with basic configuration
+   */
+  public static ObjectReader getSharedReader() {
+    return SHARED_BASIC_OBJECT_READER;
+  }
+
+  /**
+   * Returns an {@link ObjectReader} for the given type instance with basic configuration.
+   *
+   * @param type the class that the reader has to support
+   * @return an {@link ObjectReader} instance with basic configuration
+   */
+  public static ObjectReader createBasicReaderFor(Class<?> type) {
+    return SHARED_BASIC_OBJECT_MAPPER.readerFor(type);
+  }
+
+  /**
+   * Returns a shared {@link ObjectWriter} instance with basic configuration.
+   *
+   * @return a shared {@link ObjectWriter} instance with basic configuration
+   */
+  public static ObjectWriter getSharedWriter() {
+    return SHARED_BASIC_OBJECT_WRITER;
+  }
+
+  /**
+   * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration.
+   *
+   * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration
+   */
+  public static ObjectWriter getSharedWriterWithPrettyPrint() {
+    return SHARED_BASIC_OBJECT_WRITER_PRETTY;
+  }
+
+  /**
+   * Returns an {@link ObjectWriter} for the given type instance with basic configuration.
+   *
+   * @param type the class that the writer has to support
+   * @return an {@link ObjectWriter} instance with basic configuration
+   */
+  public static ObjectWriter createBasicWriterFor(Class<?> type) {
+    return SHARED_BASIC_OBJECT_MAPPER.writerFor(type);
+  }
+
+  private JacksonUtil() {}
+}
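
The new API splits along Jackson's mutability lines: ObjectReader and ObjectWriter are immutable, so the shared instances can be cached and used from any thread, while JsonFactory and ObjectMapper are mutable and are therefore always handed out as fresh copies. A short usage sketch under those assumptions (JacksonUtilTour is a hypothetical demo class, not part of the patch):

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import org.apache.hadoop.util.JacksonUtil;

    import java.io.IOException;
    import java.util.Collections;
    import java.util.Map;

    // Hypothetical demo of the JacksonUtil API; not part of HADOOP-19231.
    public class JacksonUtilTour {
      public static void main(String[] args) throws IOException {
        // Shared and immutable: safe to hold in static fields across threads.
        ObjectWriter writer = JacksonUtil.getSharedWriter();
        String json = writer.writeValueAsString(Collections.singletonMap("key", "value"));

        ObjectReader reader = JacksonUtil.createBasicReaderFor(Map.class);
        Map<?, ?> parsed = reader.readValue(json);

        // Fresh and mutable: callers may still configure their own copy,
        // which is exactly why createBasicObjectMapper() never shares one.
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

        System.out.println(json + " -> " + parsed);
      }
    }
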

+ 3 - 6
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java

@@ -76,11 +76,8 @@ public class JsonSerialization<T> {
   private final Class<T> classType;
   private final ObjectMapper mapper;
 
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
-
-  private static final ObjectReader MAP_READER =
-      new ObjectMapper().readerFor(Map.class);
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
+  private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);
 
   /**
    * @return an ObjectWriter which pretty-prints its output
@@ -106,7 +103,7 @@ public class JsonSerialization<T> {
       boolean failOnUnknownProperties, boolean pretty) {
     Preconditions.checkArgument(classType != null, "null classType");
     this.classType = classType;
-    this.mapper = new ObjectMapper();
+    this.mapper = JacksonUtil.createBasicObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
         failOnUnknownProperties);
     mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty);

+ 2 - 4
hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java

@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.JacksonUtil;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.WebApplicationException;
@@ -38,7 +37,6 @@ import java.util.Map;
 @Consumes(MediaType.APPLICATION_JSON)
 @InterfaceAudience.Private
 public class KMSJSONReader implements MessageBodyReader<Object> {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
 
   @Override
   public boolean isReadable(Class<?> type, Type genericType,
@@ -52,6 +50,6 @@ public class KMSJSONReader implements MessageBodyReader<Object> {
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
       throws IOException, WebApplicationException {
-    return MAPPER.readValue(entityStream, type);
+    return JacksonUtil.getSharedReader().readValue(entityStream, type);
   }
 }

+ 3 - 4
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java

@@ -20,8 +20,8 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -35,9 +35,8 @@ import java.io.IOException;
 @InterfaceStability.Unstable
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class DiskBalancerWorkItem {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerWorkItem.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class);
 
   private  long startTime;
   private long secondsElapsed;
@@ -173,7 +172,7 @@ public class DiskBalancerWorkItem {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    return MAPPER.writeValueAsString(this);
+    return JacksonUtil.getSharedWriter().writeValueAsString(this);
   }
 
   /**

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java

@@ -23,6 +23,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -39,14 +40,13 @@ import static com.fasterxml.jackson.databind.type.TypeFactory.defaultInstance;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskBalancerWorkStatus {
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
   private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
-      new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+      JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
   private static final ObjectReader READER_WORKSTATUS =
-      new ObjectMapper().readerFor(DiskBalancerWorkStatus.class);
-  private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
-      .readerFor(defaultInstance().constructCollectionType(List.class,
-          DiskBalancerWorkEntry.class));
+      MAPPER.readerFor(DiskBalancerWorkStatus.class);
+  private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor(
+      defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class));
 
   private final List<DiskBalancerWorkEntry> currentState;
   private Result result;

+ 5 - 9
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java

@@ -18,9 +18,7 @@
 
 package org.apache.hadoop.hdfs.util;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import java.io.File;
@@ -42,6 +40,7 @@ import java.util.concurrent.TimeoutException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.util.JacksonUtil;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -83,7 +82,6 @@ public final class CombinedHostsFileReader {
   public static DatanodeAdminProperties[]
       readFile(final String hostsFilePath) throws IOException {
     DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
-    ObjectMapper objectMapper = new ObjectMapper();
     File hostFile = new File(hostsFilePath);
     boolean tryOldFormat = false;
 
@@ -91,7 +89,8 @@ public final class CombinedHostsFileReader {
       try (Reader input =
           new InputStreamReader(
               Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
-        allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
+        allDNs = JacksonUtil.getSharedReader()
+            .readValue(input, DatanodeAdminProperties[].class);
       } catch (JsonMappingException jme) {
         // The old format doesn't have json top-level token to enclose
         // the array.
@@ -103,15 +102,12 @@ public final class CombinedHostsFileReader {
     }
 
     if (tryOldFormat) {
-      ObjectReader objectReader =
-          objectMapper.readerFor(DatanodeAdminProperties.class);
-      JsonFactory jsonFactory = new JsonFactory();
+      ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class);
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
           new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
                   StandardCharsets.UTF_8)) {
-        Iterator<DatanodeAdminProperties> iterator =
-            objectReader.readValues(jsonFactory.createParser(input));
+        Iterator<DatanodeAdminProperties> iterator = objectReader.readValues(input);
         while (iterator.hasNext()) {
           DatanodeAdminProperties properties = iterator.next();
           all.add(properties);

+ 2 - 4
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java

@@ -26,11 +26,11 @@ import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Set;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Writer support for JSON-based datanode configuration, an alternative format
@@ -59,12 +59,10 @@ public final class CombinedHostsFileWriter {
    */
   public static void writeFile(final String hostsFile,
       final Set<DatanodeAdminProperties> allDNs) throws IOException {
-    final ObjectMapper objectMapper = new ObjectMapper();
-
     try (Writer output =
         new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
             StandardCharsets.UTF_8)) {
-      objectMapper.writeValue(output, allDNs);
+      JacksonUtil.getSharedWriter().writeValue(output, allDNs);
     }
   }
 }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java

@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
@@ -654,7 +654,7 @@ public class JsonUtilClient {
     }
 
     final String namesInJson = (String) json.get("XAttrNames");
-    ObjectReader reader = new ObjectMapper().readerFor(List.class);
+    ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class);
     final List<Object> xattrs = reader.readValue(namesInJson);
     final List<String> names =
         Lists.newArrayListWithCapacity(json.keySet().size());

+ 2 - 1
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java

@@ -71,6 +71,7 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthentica
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1818,7 +1819,7 @@ public class HttpFSFileSystem extends FileSystem
 
   @VisibleForTesting
   static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
         String.class, BlockLocation[].class);
     MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java

@@ -21,7 +21,6 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
@@ -32,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
@@ -71,7 +71,7 @@ public class SlowDiskTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
 
   /**
    * Number of disks to include in JSON report per operation. We will return

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java

@@ -19,7 +19,6 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
@@ -30,6 +29,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics;
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,7 +75,8 @@ public class SlowPeerTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = new ObjectMapper().writer();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+
   /**
    * Number of nodes to include in JSON report. We will return nodes with
    * the highest number of votes from peers.

+ 4 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java

@@ -79,18 +79,18 @@ import org.apache.hadoop.util.CloseableReferenceCount;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
+import org.apache.hadoop.util.Preconditions;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
-import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -103,10 +103,9 @@ import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFact
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
-  private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(BlockIteratorState.class);
+      JacksonUtil.createBasicReaderFor(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java

@@ -32,7 +32,6 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -60,6 +59,7 @@ import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
@@ -369,7 +369,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
   }
 
   private static final ObjectWriter WRITER =
-      new ObjectMapper().writerWithDefaultPrettyPrinter();
+      JacksonUtil.getSharedWriterWithPrettyPrint();
 
   private static class ProvidedBlockIteratorState {
     ProvidedBlockIteratorState() {

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hdfs.server.diskbalancer.command;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.commons.cli.CommandLine;
@@ -47,6 +46,7 @@ import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HostsFileReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,8 +77,7 @@ import java.util.TreeSet;
  * Common interface for command handling.
  */
 public abstract class Command extends Configured implements Closeable {
-  private static final ObjectReader READER =
-      new ObjectMapper().readerFor(HashMap.class);
+  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class);
   static final Logger LOG = LoggerFactory.getLogger(Command.class);
   private Map<String, String> validArgs = new HashMap<>();
   private URI clusterURI;

+ 3 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java

@@ -17,15 +17,14 @@
 
 package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
-import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
-    .DiskBalancerDataNode;
+import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
 
 import java.io.File;
 import java.net.URL;
@@ -38,7 +37,7 @@ public class JsonNodeConnector implements ClusterConnector {
   private static final Logger LOG =
       LoggerFactory.getLogger(JsonNodeConnector.class);
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerCluster.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
   private final URL clusterURI;
 
   /**

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java

@@ -19,9 +19,7 @@ package org.apache.hadoop.hdfs.server.diskbalancer.datamodel;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.Preconditions;
 
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -31,6 +29,8 @@ import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.JacksonUtil;
+import org.apache.hadoop.util.Preconditions;
 
 import java.io.File;
 import java.io.IOException;
@@ -73,7 +73,7 @@ public class DiskBalancerCluster {
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerCluster.class);
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerCluster.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
   private final Set<String> exclusionList;
   private final Set<String> inclusionList;
   private ClusterConnector clusterConnector;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java

@@ -19,10 +19,10 @@ package org.apache.hadoop.hdfs.server.diskbalancer.datamodel;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,7 +34,7 @@ import java.io.IOException;
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class DiskBalancerVolume {
   private static final ObjectReader READER =
-      new ObjectMapper().readerFor(DiskBalancerVolume.class);
+      JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class);
 
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerVolume.class);

+ 3 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java

@@ -18,9 +18,9 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.planner;
 
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 
 import java.io.IOException;
@@ -39,10 +39,8 @@ public class NodePlan {
   private int port;
   private long timeStamp;
 
-  private static final ObjectMapper MAPPER = new ObjectMapper();
-  private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
-  private static final ObjectWriter WRITER = MAPPER.writerFor(
-      MAPPER.constructType(NodePlan.class));
+  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class);
+  private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class);
   /**
    * returns timestamp when this plan was created.
    *

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -26,6 +25,7 @@ import org.apache.hadoop.net.Node;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.ServletContext;
@@ -123,8 +123,7 @@ public class NetworkTopologyServlet extends DfsServlet {
 
   protected void printJsonFormat(PrintStream stream, Map<String,
       TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream);
     dumpGenerator.writeStartArray();
 
     for(String r : racks) {

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java

@@ -21,7 +21,6 @@ import java.io.IOException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
@@ -29,6 +28,7 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressVie
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.JacksonUtil;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
@@ -61,7 +61,7 @@ public class StartupProgressServlet extends DfsServlet {
     StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
       getServletContext());
     StartupProgressView view = prog.createView();
-    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
+    JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter());
     try {
       json.writeStartObject();
       json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());

+ 12 - 12
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -38,13 +39,12 @@ import org.apache.hadoop.hdfs.protocol.*;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 import java.io.IOException;
 import java.util.*;
 
@@ -52,11 +52,11 @@ import java.util.*;
 public class JsonUtil {
   private static final Object[] EMPTY_OBJECT_ARRAY = {};
 
-  // Reuse ObjectMapper instance for improving performance.
-  // ObjectMapper is thread safe as long as we always configure instance
+  // Reuse ObjectWriter instance for improving performance.
+  // ObjectWriter is thread safe as long as we always configure instance
   // before use. We don't have a re-entrant call pattern in WebHDFS,
   // so we just need to worry about thread-safety.
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();
 
   /** Convert a token object to a Json string. */
   public static String toJsonString(final Token<? extends TokenIdentifier> token
@@ -93,7 +93,7 @@ public class JsonUtil {
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put(key, value);
     try {
-      return MAPPER.writeValueAsString(m);
+      return SHARED_WRITER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -113,7 +113,7 @@ public class JsonUtil {
     final Map<String, Object> m = toJsonMap(status);
     try {
       return includeType ?
-          toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m);
+          toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -453,7 +453,7 @@ public class JsonUtil {
     finalMap.put(AclStatus.class.getSimpleName(), m);
 
     try {
-      return MAPPER.writeValueAsString(finalMap);
+      return SHARED_WRITER.writeValueAsString(finalMap);
     } catch (IOException ignored) {
     }
     return null;
@@ -491,7 +491,7 @@ public class JsonUtil {
       final XAttrCodec encoding) throws IOException {
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
-    return MAPPER.writeValueAsString(finalMap);
+    return SHARED_WRITER.writeValueAsString(finalMap);
   }
   
   public static String toJsonString(final List<XAttr> xAttrs)
@@ -500,14 +500,14 @@ public class JsonUtil {
     for (XAttr xAttr : xAttrs) {
       names.add(XAttrHelper.getPrefixedName(xAttr));
     }
-    String ret = MAPPER.writeValueAsString(names);
+    String ret = SHARED_WRITER.writeValueAsString(names);
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrNames", ret);
-    return MAPPER.writeValueAsString(finalMap);
+    return SHARED_WRITER.writeValueAsString(finalMap);
   }
 
   public static String toJsonString(Object obj) throws IOException {
-    return MAPPER.writeValueAsString(obj);
+    return SHARED_WRITER.writeValueAsString(obj);
   }
 
   public static String toJsonString(BlockStoragePolicy[] storagePolicies) {
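
Because an ObjectWriter is immutable once built, the SHARED_WRITER above can serve concurrent WebHDFS serialization without any configure-before-use discipline. A minimal sketch of that access pattern (SharedWriterDemo is hypothetical, for illustration only):

    import com.fasterxml.jackson.databind.ObjectWriter;
    import org.apache.hadoop.util.JacksonUtil;

    import java.util.Collections;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Hypothetical demo; not part of HADOOP-19231.
    public class SharedWriterDemo {
      private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();

      public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 8; i++) {
          final int call = i;
          pool.execute(() -> {
            try {
              // No locking needed: the immutable writer is shared across threads.
              System.out.println(
                  WRITER.writeValueAsString(Collections.singletonMap("call", call)));
            } catch (Exception e) {
              e.printStackTrace();
            }
          });
        }
        pool.shutdown();
      }
    }
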

+ 2 - 3
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java

@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapred;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,6 +27,7 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -531,8 +531,7 @@ public class QueueManager {
       return;
     }
     
-    JsonFactory dumpFactory = new JsonFactory();
-    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
+    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
     QueueConfigurationParser parser;
     boolean aclsEnabled = false;
     if (conf != null) {

+ 2 - 1
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java

@@ -28,6 +28,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 
 /**
@@ -41,7 +42,7 @@ public final class JobHistoryEventUtils {
   public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
 
   public static JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     ArrayNode nodes = mapper.createArrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {

+ 3 - 3
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java

@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,6 +29,7 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.S3AUtils;
+import org.apache.hadoop.util.JacksonUtil;
 
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;
 
@@ -91,8 +91,8 @@ public final class S3AEncryption {
       if (encryptionContextMap.isEmpty()) {
         return "";
       }
-      final String encryptionContextJson = new ObjectMapper().writeValueAsString(
-          encryptionContextMap);
+      final String encryptionContextJson = JacksonUtil.getSharedWriter()
+          .writeValueAsString(encryptionContextMap);
       return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8));
     } catch (IOException e) {
       if (propagateExceptions) {

+ 2 - 2
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java

@@ -84,6 +84,7 @@ import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.LambdaUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
@@ -96,7 +97,6 @@ import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_ST
 import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import com.microsoft.azure.storage.StorageException;
 
@@ -127,7 +127,7 @@ public class NativeAzureFileSystem extends FileSystem {
     private static final int FORMATTING_BUFFER = 10000;
     private boolean committed;
     public static final String SUFFIX = "-RenamePending.json";
-    private static final ObjectReader READER = new ObjectMapper()
+    private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper()
         .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
         .readerFor(JsonNode.class);
 

+ 4 - 4
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java

@@ -24,11 +24,11 @@ import java.net.URISyntaxException;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.http.NameValuePair;
@@ -40,7 +40,7 @@ import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectReader;
 
 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
 
@@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl {
 
   public static final Logger LOG =
       LoggerFactory.getLogger(AzureNativeFileSystemStore.class);
-  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
-      .readerFor(RemoteSASKeyGenerationResponse.class);
+  private static final ObjectReader RESPONSE_READER = JacksonUtil
+      .createBasicReaderFor(RemoteSASKeyGenerationResponse.class);
 
   /**
    * Configuration parameter name expected in the Configuration

+ 5 - 5
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.fs.azure;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
@@ -29,13 +28,14 @@ import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import java.util.concurrent.TimeUnit;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
 
@@ -49,8 +49,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
 
   public static final Logger LOG = LoggerFactory
       .getLogger(RemoteWasbAuthorizerImpl.class);
-  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
-      .readerFor(RemoteWasbAuthorizerResponse.class);
+  private static final ObjectReader RESPONSE_READER = JacksonUtil
+      .createBasicReaderFor(RemoteWasbAuthorizerResponse.class);
 
   /**
    * Configuration parameter name expected in the Configuration object to
@@ -176,7 +176,7 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
       uriBuilder
           .addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath);
       uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType);
-      if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) {
+      if (StringUtils.isNotEmpty(resourceOwner)) {
         uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME,
             resourceOwner);
       }

+ 3 - 5
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java

@@ -29,9 +29,6 @@ import java.util.Date;
 import java.util.Hashtable;
 import java.util.Map;
 
-import org.apache.hadoop.util.Preconditions;
-
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
 import org.slf4j.Logger;
@@ -42,6 +39,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils;
 import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy;
+import org.apache.hadoop.util.JacksonUtil;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * This class provides convenience methods to obtain AAD tokens.
@@ -493,8 +492,7 @@ public final class AzureADAuthenticator {
       int expiryPeriodInSecs = 0;
       long expiresOnInSecs = -1;
 
-      JsonFactory jf = new JsonFactory();
-      JsonParser jp = jf.createParser(httpResponseStream);
+      JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream);
       String fieldName, fieldValue;
       jp.nextToken();
       while (jp.hasCurrentToken()) {
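
JsonFactory instances are thread-safe once configured, so a factory obtained from createBasicJsonFactory() can be created cheaply or cached and then used for token-level parsing as above. A self-contained usage sketch, with the JSON payload and field names invented for illustration:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.JsonToken;

    public class TokenStreamExample {
      public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream(
            "{\"access_token\":\"abc\",\"expires_in\":\"3600\"}"
                .getBytes(StandardCharsets.UTF_8));
        // Stand-in for JacksonUtil.createBasicJsonFactory().
        JsonFactory jf = new JsonFactory();
        try (JsonParser jp = jf.createParser(in)) {
          jp.nextToken(); // START_OBJECT
          while (jp.nextToken() == JsonToken.FIELD_NAME) {
            String fieldName = jp.getCurrentName();
            jp.nextToken(); // advance to the value token
            System.out.println(fieldName + " = " + jp.getText());
          }
        }
      }
    }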

+ 3 - 4
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java

@@ -30,7 +30,6 @@ import java.util.Map;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,6 +39,7 @@ import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
 import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable;
 import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
 import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Base Http operation class for orchestrating server IO calls. Child classes would
@@ -447,7 +447,7 @@ public abstract class AbfsHttpOperation implements AbfsPerfLoggable {
       if (stream == null) {
         return;
       }
-      JsonFactory jf = new JsonFactory();
+      JsonFactory jf = JacksonUtil.createBasicJsonFactory();
       try (JsonParser jp = jf.createParser(stream)) {
         String fieldName, fieldValue;
         jp.nextToken();  // START_OBJECT - {
@@ -509,8 +509,7 @@ public abstract class AbfsHttpOperation implements AbfsPerfLoggable {
     }
 
     try {
-      final ObjectMapper objectMapper = new ObjectMapper();
-      this.listResultSchema = objectMapper.readValue(stream,
+      this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream,
           ListResultSchema.class);
     } catch (IOException ex) {
       log.error("Unable to deserialize list results", ex);

+ 3 - 2
hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java

@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -484,7 +485,7 @@ public final class DynoInfraUtils {
       final int blockThreshold, final Logger log) throws IOException {
     final Set<String> dataNodesToReport = new HashSet<>();
 
-    JsonFactory fac = new JsonFactory();
+    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
     JsonParser parser = fac.createParser(IOUtils
         .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
 
@@ -554,7 +555,7 @@ public final class DynoInfraUtils {
           "Unable to retrieve JMX: " + conn.getResponseMessage());
     }
     InputStream in = conn.getInputStream();
-    JsonFactory fac = new JsonFactory();
+    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
     JsonParser parser = fac.createParser(in);
     if (parser.nextToken() != JsonToken.START_OBJECT
         || parser.nextToken() != JsonToken.FIELD_NAME

+ 4 - 7
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java

@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 import com.fasterxml.jackson.core.JsonEncoding;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.Version;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -36,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.mapreduce.ID;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.tools.rumen.datatypes.*;
@@ -55,8 +55,7 @@ public class Anonymizer extends Configured implements Tool {
   private StatePool statePool;
   
   private ObjectMapper outMapper = null;
-  private JsonFactory outFactory = null;
-  
+
   private void initialize(String[] args) throws Exception {
     try {
       for (int i = 0; i < args.length; ++i) {
@@ -85,7 +84,7 @@ public class Anonymizer extends Configured implements Tool {
     // initialize the state manager after the anonymizers are registered
     statePool.initialize(getConf());
      
-    outMapper = new ObjectMapper();
+    outMapper = JacksonUtil.createBasicObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule(
         "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
@@ -104,8 +103,6 @@ public class Anonymizer extends Configured implements Tool {
     
     // register the module with the object-mapper
     outMapper.registerModule(module);
-    
-    outFactory = outMapper.getFactory();
   }
   
   // anonymize the job trace file
@@ -191,7 +188,7 @@ public class Anonymizer extends Configured implements Tool {
     }
 
     JsonGenerator outGen =
-        outFactory.createGenerator(output, JsonEncoding.UTF8);
+        outMapper.createGenerator(output, JsonEncoding.UTF8);
     outGen.useDefaultPrettyPrinter();
     
     return outGen;
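
The change above relies on ObjectMapper.createGenerator(...) (available since Jackson 2.11), which replaces the two-step getFactory().createGenerator(...) call and keeps the mapper as the single entry point. A small sketch of the equivalence, writing to a ByteArrayOutputStream for illustration:

    import java.io.ByteArrayOutputStream;
    import java.util.Collections;
    import com.fasterxml.jackson.core.JsonEncoding;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class CreateGeneratorExample {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // Old style: fetch the factory first.
        // JsonGenerator gen = mapper.getFactory().createGenerator(out, JsonEncoding.UTF8);

        // New style (Jackson 2.11+): the mapper creates the generator directly,
        // and the generator stays bound to the mapper for writeObject(...).
        try (JsonGenerator gen = mapper.createGenerator(out, JsonEncoding.UTF8)) {
          gen.useDefaultPrettyPrinter();
          gen.writeObject(Collections.singletonMap("key", "value"));
        }
        System.out.println(out.toString("UTF-8"));
      }
    }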

+ 5 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java

@@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A simple wrapper for parsing JSON-encoded data using ObjectMapper.
@@ -48,10 +49,10 @@ class JsonObjectMapperParser<T> implements Closeable {
    */
   public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
       Configuration conf) throws IOException {
-    mapper = new ObjectMapper();
+    mapper = JacksonUtil.createBasicObjectMapper();
     this.clazz = clazz;
     InputStream input = new PossiblyDecompressedInputStream(path, conf);
-    jsonParser = mapper.getFactory().createParser(input);
+    jsonParser = mapper.createParser(input);
   }
 
   /**
@@ -62,9 +63,9 @@ class JsonObjectMapperParser<T> implements Closeable {
    */
   public JsonObjectMapperParser(InputStream input, Class<? extends T> clazz)
       throws IOException {
-    mapper = new ObjectMapper();
+    mapper = JacksonUtil.createBasicObjectMapper();
     this.clazz = clazz;
-    jsonParser = mapper.getFactory().createParser(input);
+    jsonParser = mapper.createParser(input);
   }
 
   /**

+ 3 - 2
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.ID;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
 import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer;
 import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Simple wrapper around {@link JsonGenerator} to write objects in JSON format.
@@ -39,7 +40,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
   private JsonGenerator writer;
   
   public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
 
     // define a module
     SimpleModule module = new SimpleModule(
@@ -53,7 +54,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8);
+    writer = mapper.createGenerator(output, JsonEncoding.UTF8);
     if (prettyPrint) {
       writer.useDefaultPrettyPrinter();
     }

+ 5 - 6
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java

@@ -30,7 +30,6 @@ import java.util.HashMap;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.core.JsonEncoding;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.Version;
@@ -44,6 +43,7 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.Anonymizer;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A pool of states. States used by {@link DataType}'s can be managed the 
@@ -206,7 +206,7 @@ public class StatePool {
   }
   
   private void read(DataInput in) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -216,7 +216,7 @@ public class StatePool {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    JsonParser parser = mapper.getFactory().createParser((InputStream)in);
+    JsonParser parser = mapper.createParser((InputStream)in);
     StatePool statePool = mapper.readValue(parser, StatePool.class);
     this.setStates(statePool.getStates());
     parser.close();
@@ -273,7 +273,7 @@ public class StatePool {
   private void write(DataOutput out) throws IOException {
     // This is just a JSON experiment
     System.out.println("Dumping the StatePool's in JSON format.");
-    ObjectMapper outMapper = new ObjectMapper();
+    ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -283,9 +283,8 @@ public class StatePool {
     // register the module with the object-mapper
     outMapper.registerModule(module);
 
-    JsonFactory outFactory = outMapper.getFactory();
     JsonGenerator jGen =
-        outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8);
+        outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8);
     jGen.useDefaultPrettyPrinter();
 
     jGen.writeObject(this);

+ 1 - 3
hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java

@@ -23,7 +23,6 @@ import java.io.OutputStream;
 import java.util.List;
 
 import com.fasterxml.jackson.core.JsonEncoding;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
@@ -141,9 +140,8 @@ public class TestHistograms {
         Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);
 
         ObjectMapper mapper = new ObjectMapper();
-        JsonFactory factory = mapper.getFactory();
         FSDataOutputStream ostream = lfs.create(goldFilePath, true);
-        JsonGenerator gen = factory.createGenerator((OutputStream)ostream,
+        JsonGenerator gen = mapper.createGenerator((OutputStream)ostream,
             JsonEncoding.UTF8);
         gen.useDefaultPrettyPrinter();
         

+ 3 - 7
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java

@@ -16,13 +16,13 @@
 
 package org.apache.hadoop.yarn.sls;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ReservationId;
@@ -44,11 +44,8 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -122,15 +119,14 @@ public class AMRunner {
    * Parse workload from a SLS trace file.
    */
   private void startAMFromSLSTrace(String inputTrace) throws IOException {
-    JsonFactory jsonF = new JsonFactory();
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
 
     try (Reader input = new InputStreamReader(
         new FileInputStream(inputTrace), StandardCharsets.UTF_8)) {
       JavaType type = mapper.getTypeFactory().
           constructMapType(Map.class, String.class, String.class);
       Iterator<Map<String, String>> jobIter = mapper.readValues(
-          jsonF.createParser(input), type);
+          mapper.createParser(input), type);
 
       while (jobIter.hasNext()) {
         try {

+ 4 - 4
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java

@@ -35,7 +35,6 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.commons.cli.CommandLine;
@@ -44,6 +43,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 
 @Private
@@ -126,10 +126,10 @@ public class RumenToSLSConverter {
             StandardCharsets.UTF_8)) {
       try (Writer output =
           new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-        ObjectMapper mapper = new ObjectMapper();
+        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
-            new JsonFactory().createParser(input), Map.class);
+            mapper.createParser(input), Map.class);
         while (i.hasNext()) {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
@@ -143,7 +143,7 @@ public class RumenToSLSConverter {
           throws IOException {
     try (Writer output =
         new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-      ObjectMapper mapper = new ObjectMapper();
+      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
         Map rack = new LinkedHashMap();

+ 3 - 1
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java

@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
@@ -88,7 +89,8 @@ public class SynthTraceJobProducer implements JobStoryProducer {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
+
+    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     FileSystem ifs = path.getFileSystem(conf);
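
Where a caller needs a non-default JsonFactory — here, field-name interning for the many repeated keys in SLS trace files — the diff routes it through JacksonUtil.createObjectMapper(factory) instead of the raw ObjectMapper constructor. A sketch of the call pattern, assuming createObjectMapper simply wraps the supplied factory:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonFactoryBuilder;
    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class CustomFactoryExample {
      public static ObjectMapper buildTraceMapper() {
        JsonFactoryBuilder builder = new JsonFactoryBuilder();
        // Interning repeated field names saves memory on large trace files.
        builder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
        JsonFactory factory = builder.build();

        // Assumption: JacksonUtil.createObjectMapper(factory) behaves like this.
        ObjectMapper mapper = new ObjectMapper(factory);
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        return mapper;
      }
    }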

+ 5 - 7
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java

@@ -34,7 +34,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -45,6 +44,7 @@ import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
@@ -120,12 +120,11 @@ public class SLSUtils {
   public static Set<NodeDetails> parseNodesFromSLSTrace(
       String jobTrace) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    JsonFactory jsonF = new JsonFactory();
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
       while (i.hasNext()) {
         addNodes(nodeSet, i.next());
       }
@@ -167,12 +166,11 @@ public class SLSUtils {
   public static Set<NodeDetails> parseNodesFromNodeFile(
       String nodeFile, Resource nmDefaultResource) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    JsonFactory jsonF = new JsonFactory();
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
       while (i.hasNext()) {
         Map jsonE = i.next();
         String rack = "/" + jsonE.get("rack");

+ 3 - 2
hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.sls;
 
 import org.apache.commons.math3.random.JDKRandomGenerator;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
@@ -60,7 +61,7 @@ public class TestSynthJobGeneration {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
     SynthTraceJobProducer.Workload wl =
         mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@@ -181,7 +182,7 @@ public class TestSynthJobGeneration {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     JDKRandomGenerator rand = new JDKRandomGenerator();

+ 26 - 28
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java

@@ -28,6 +28,7 @@ import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry;
 import org.apache.hadoop.yarn.appcatalog.model.Application;
@@ -57,6 +58,18 @@ public class AppCatalogSolrClient {
   private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class);
   private static String urlString;
 
+  /**
+   * It is more performant to reuse ObjectMapper instances; keeping the instance
+   * private also makes it harder for someone to reconfigure it, which might have
+   * unwanted side effects.
+   */
+  private static final ObjectMapper OBJECT_MAPPER;
+
+  static {
+    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
   public AppCatalogSolrClient() {
     // Locate Solr URL
     ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -146,8 +159,6 @@ public class AppCatalogSolrClient {
 
   public List<AppEntry> listAppEntries() {
     List<AppEntry> list = new ArrayList<AppEntry>();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -164,7 +175,7 @@ public class AppCatalogSolrClient {
         entry.setId(d.get("id").toString());
         entry.setName(d.get("name_s").toString());
         entry.setApp(d.get("app_s").toString());
-        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
             Service.class));
         list.add(entry);
       }
@@ -176,8 +187,6 @@ public class AppCatalogSolrClient {
 
   public AppStoreEntry findAppStoreEntry(String id) {
     AppStoreEntry entry = new AppStoreEntry();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -197,7 +206,7 @@ public class AppCatalogSolrClient {
         entry.setDesc(d.get("desc_s").toString());
         entry.setLike(Integer.parseInt(d.get("like_i").toString()));
         entry.setDownload(Integer.parseInt(d.get("download_i").toString()));
-        Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(),
+        Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
             Service.class);
         String name;
         try {
@@ -222,9 +231,6 @@ public class AppCatalogSolrClient {
 
   public AppEntry findAppEntry(String id) {
     AppEntry entry = new AppEntry();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
     query.setQuery("id:" + id);
@@ -240,7 +246,7 @@ public class AppCatalogSolrClient {
         entry.setId(d.get("id").toString());
         entry.setApp(d.get("app_s").toString());
         entry.setName(d.get("name_s").toString());
-        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
             Service.class));
       }
     } catch (SolrServerException | IOException e) {
@@ -252,8 +258,6 @@ public class AppCatalogSolrClient {
   public void deployApp(String id, Service service) throws SolrServerException,
       IOException {
     long download = 0;
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     // Find application information from AppStore
@@ -287,7 +291,7 @@ public class AppCatalogSolrClient {
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", entry.getOrg()+"/"+entry.getName());
-      request.addField("yarnfile_s", mapper.writeValueAsString(service));
+      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
       docs.add(request);
     }
 
@@ -326,8 +330,6 @@ public class AppCatalogSolrClient {
   public void register(Application app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -343,10 +345,10 @@ public class AppCatalogSolrClient {
       buffer.setField("download_i", 0);
 
       // Keep only YARN data model for yarnfile field
-      String yarnFile = mapper.writeValueAsString(app);
-      LOG.info("app:"+yarnFile);
-      Service yarnApp = mapper.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
+      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+      LOG.info("app:{}", yarnFile);
+      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
 
       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -359,8 +361,6 @@ public class AppCatalogSolrClient {
   protected void register(AppStoreEntry app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -376,10 +376,10 @@ public class AppCatalogSolrClient {
       buffer.setField("download_i", app.getDownload());
 
       // Keep only YARN data model for yarnfile field
-      String yarnFile = mapper.writeValueAsString(app);
-      LOG.info("app:"+yarnFile);
-      Service yarnApp = mapper.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
+      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+      LOG.info("app:{}", yarnFile);
+      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
 
       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -391,8 +391,6 @@ public class AppCatalogSolrClient {
 
   public void upgradeApp(Service service) throws IOException,
       SolrServerException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     if (service!=null) {
@@ -420,7 +418,7 @@ public class AppCatalogSolrClient {
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", app);
-      request.addField("yarnfile_s", mapper.writeValueAsString(service));
+      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
       docs.add(request);
     }
     try {
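
The static OBJECT_MAPPER above is configured once to tolerate unknown properties, so Solr documents carrying extra fields still deserialize into the YARN Service model. A self-contained sketch of that behavior, using a hypothetical target class in place of Service:

    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class LenientReadExample {
      // Hypothetical target type; the real code binds to the YARN Service class.
      public static class ServiceStub {
        public String name;
      }

      private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
      static {
        OBJECT_MAPPER.configure(
            DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
      }

      public static void main(String[] args) throws Exception {
        // "version" is unknown to ServiceStub; without the configure(...) call
        // above, readValue would throw UnrecognizedPropertyException.
        String json = "{\"name\":\"sleeper\",\"version\":\"1.0\"}";
        ServiceStub s = OBJECT_MAPPER.readValue(json, ServiceStub.class);
        System.out.println(s.name); // sleeper
      }
    }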

+ 19 - 15
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java

@@ -23,6 +23,7 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ServiceState;
@@ -46,6 +47,19 @@ import org.slf4j.LoggerFactory;
 public class YarnServiceClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class);
+
+  /**
+   * It is more performant to reuse ObjectMapper instances; keeping the instance
+   * private also makes it harder for someone to reconfigure it, which might have
+   * unwanted side effects.
+   */
+  private static final ObjectMapper OBJECT_MAPPER;
+
+  static {
+    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
   private static Configuration conf = new Configuration();
   private static ClientConfig getClientConfig() {
     ClientConfig config = new DefaultClientConfig();
@@ -66,8 +80,6 @@ public class YarnServiceClient {
   }
 
   public void createApp(Service app) {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     ClientResponse response;
     try {
       boolean useKerberos = UserGroupInformation.isSecurityEnabled();
@@ -90,7 +102,7 @@ public class YarnServiceClient {
         app.setKerberosPrincipal(kerberos);
       }
       response = asc.getApiClient().post(ClientResponse.class,
-          mapper.writeValueAsString(app));
+          OBJECT_MAPPER.writeValueAsString(app));
       if (response.getStatus() >= 299) {
         String message = response.getEntity(String.class);
         throw new RuntimeException("Failed : HTTP error code : "
@@ -119,10 +131,8 @@ public class YarnServiceClient {
   }
 
   public void restartApp(Service app) throws JsonProcessingException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = mapper.writeValueAsString(app);
+    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -139,10 +149,8 @@ public class YarnServiceClient {
   }
 
   public void stopApp(Service app) throws JsonProcessingException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = mapper.writeValueAsString(app);
+    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -159,14 +167,12 @@ public class YarnServiceClient {
   }
 
   public void getStatus(AppEntry entry) {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = entry.getName();
     Service app = null;
     try {
       String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId))
           .get(String.class);
-      app = mapper.readValue(yarnFile, Service.class);
+      app = OBJECT_MAPPER.readValue(yarnFile, Service.class);
       entry.setYarnfile(app);
     } catch (UniformInterfaceException | IOException e) {
       LOG.error("Error in fetching application status: ", e);
@@ -174,11 +180,9 @@ public class YarnServiceClient {
   }
 
   public void upgradeApp(Service app) throws JsonProcessingException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
     app.setState(ServiceState.EXPRESS_UPGRADING);
-    String yarnFile = mapper.writeValueAsString(app);
+    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@@ -875,7 +876,7 @@ public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
         doRegistryUpdate = false;
       }
     }
-    ObjectMapper mapper = new ObjectMapper();
+    final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     try {
       Map<String, List<Map<String, String>>> ports = null;
       ports = mapper.readValue(status.getExposedPorts(),

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -61,9 +62,10 @@ public class JsonSerDeser<T> {
   @SuppressWarnings("deprecation")
   public JsonSerDeser(Class<T> classType) {
     this.classType = classType;
-    this.mapper = new ObjectMapper();
+    this.mapper = JacksonUtil.createBasicObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
   }
 
   public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) {
@@ -231,7 +233,6 @@ public class JsonSerDeser<T> {
    * @throws JsonProcessingException parse problems
    */
   public String toJson(T instance) throws JsonProcessingException {
-    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
     return mapper.writeValueAsString(instance);
   }
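
Moving the INDENT_OUTPUT configuration from toJson() into the constructor is more than tidiness: ObjectMapper.configure(...) mutates shared mapper state and should only be called before the mapper is in use, so configuration belongs at construction time rather than in a per-call hot path. A trimmed sketch of the resulting shape:

    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.SerializationFeature;

    public class ConfigureOnceExample {
      private final ObjectMapper mapper;

      public ConfigureOnceExample() {
        this.mapper = new ObjectMapper();
        // Configure during construction, before the mapper is shared.
        mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
      }

      public String toJson(Object instance) throws JsonProcessingException {
        // No per-call configure(...): the serialization path stays read-only.
        return mapper.writeValueAsString(instance);
      }
    }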
 

+ 14 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java

@@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
 
 import java.io.IOException;
@@ -41,6 +42,18 @@ import java.util.Properties;
 @JsonInclude(value = JsonInclude.Include.NON_NULL)
 public class PublishedConfiguration {
 
+  /**
+   * It is more performant to reuse ObjectMapper instances; keeping the instance
+   * private also makes it harder for someone to reconfigure it, which might have
+   * unwanted side effects.
+   */
+  private static final ObjectMapper OBJECT_MAPPER;
+
+  static {
+    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+    OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
+  }
+
   public String description;
   public long updated;
   
@@ -154,9 +167,7 @@ public class PublishedConfiguration {
    * @throws IOException marshalling failure
    */
   public String asJson() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
-    String json = mapper.writeValueAsString(entries);
+    String json = OBJECT_MAPPER.writeValueAsString(entries);
     return json;
   }
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java

@@ -49,6 +49,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -60,7 +61,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -274,7 +274,7 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   }
 
   private ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -365,8 +365,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
 
     protected void prepareForWrite() throws IOException{
       this.stream = createLogFileStream(fs, logPath);
-      this.jsonGenerator = new JsonFactory().createGenerator(
-          (OutputStream)stream);
+      this.jsonGenerator = JacksonUtil.getSharedWriter()
+          .createGenerator((OutputStream)stream);
       this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
       this.lastModifiedTime = Time.monotonicNow();
     }

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java

@@ -30,6 +30,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -62,7 +63,7 @@ public class TimelineClientImpl extends TimelineClient {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(TimelineClientImpl.class);
-  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
   private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/";
 
   private static Options opts;

+ 4 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java

@@ -27,9 +27,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;
 
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -96,9 +96,8 @@ public final class DockerClientConfigHandler {
     }
 
     // Parse the JSON and create the Tokens/Credentials.
-    ObjectMapper mapper = new ObjectMapper();
-    JsonFactory factory = mapper.getFactory();
-    JsonParser parser = factory.createParser(contents);
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonParser parser = mapper.createParser(contents);
     JsonNode rootNode = mapper.readTree(parser);
 
     Credentials credentials = new Credentials();
@@ -161,7 +160,7 @@ public final class DockerClientConfigHandler {
       Credentials credentials) throws IOException {
     boolean foundDockerCred = false;
     if (credentials.numberOfTokens() > 0) {
-      ObjectMapper mapper = new ObjectMapper();
+      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
       ObjectNode rootNode = mapper.createObjectNode();
       ObjectNode registryUrlNode = mapper.createObjectNode();
       for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
@@ -53,11 +54,10 @@ public class TimelineUtils {
       "TIMELINE_FLOW_RUN_ID_TAG";
   public final static String DEFAULT_FLOW_VERSION = "1";
 
-  private static ObjectMapper mapper;
+  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
 
   static {
-    mapper = new ObjectMapper();
-    YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
+    YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
   }
 
   /**
@@ -90,9 +90,9 @@ public class TimelineUtils {
   public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
       throws JsonGenerationException, JsonMappingException, IOException {
     if (pretty) {
-      return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
+      return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o);
     } else {
-      return mapper.writeValueAsString(o);
+      return OBJECT_MAPPER.writeValueAsString(o);
     }
   }
 

+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java

@@ -28,8 +28,8 @@ import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.webapp.view.DefaultPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,7 +42,6 @@ import com.google.inject.servlet.RequestScoped;
 @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
 public abstract class Controller implements Params {
   public static final Logger LOG = LoggerFactory.getLogger(Controller.class);
-  static final ObjectMapper jsonMapper = new ObjectMapper();
 
   @RequestScoped
   public static class RequestContext{
@@ -225,7 +224,7 @@ public abstract class Controller implements Params {
     context().rendered = true;
     context().response.setContentType(MimeType.JSON);
     try {
-      jsonMapper.writeValue(writer(), object);
+      JacksonUtil.getSharedWriter().writeValue(writer(), object);
     } catch (Exception e) {
       throw new WebAppException(e);
     }

+ 3 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java

@@ -19,11 +19,11 @@ package org.apache.hadoop.yarn.server.timeline;
 
 import java.io.IOException;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A utility class providing methods for serializing and deserializing
@@ -38,14 +38,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 public class GenericObjectMapper {
   private static final byte[] EMPTY_BYTES = new byte[0];
 
-  public static final ObjectReader OBJECT_READER;
-  public static final ObjectWriter OBJECT_WRITER;
-
-  static {
-    ObjectMapper mapper = new ObjectMapper();
-    OBJECT_READER = mapper.reader(Object.class);
-    OBJECT_WRITER = mapper.writer();
-  }
+  public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class);
+  public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter();
 
   /**
    * Serializes an Object into a byte array. Along with {@link #read(byte[])},
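
The replacement above collapses a static initializer block into two field initializers: ObjectReader and ObjectWriter are immutable, so a reader bound to a target class and the shared writer can both be cached safely as constants. A sketch of the pattern, assuming createBasicReaderFor builds its reader from a default-configured mapper:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.ObjectWriter;

    public class CachedReaderWriterExample {
      private static final ObjectMapper MAPPER = new ObjectMapper();

      // Assumption: JacksonUtil.createBasicReaderFor(clazz) is equivalent to this.
      public static ObjectReader createBasicReaderFor(Class<?> clazz) {
        return MAPPER.readerFor(clazz);
      }

      // Both are immutable and therefore safe to share across threads.
      public static final ObjectReader OBJECT_READER =
          createBasicReaderFor(Object.class);
      public static final ObjectWriter OBJECT_WRITER = MAPPER.writer();
    }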

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java

@@ -43,6 +43,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord;
@@ -135,7 +136,7 @@ public class AuxServices extends AbstractService
     this.dirsHandler = nmContext.getLocalDirsHandler();
     this.delService = deletionService;
     this.userUGI = getRemoteUgi();
-    this.mapper = new ObjectMapper();
+    this.mapper = JacksonUtil.createBasicObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     // Obtain services from configuration in init()
   }

+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java

@@ -28,11 +28,11 @@ import java.util.regex.Pattern;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.ObjectMapper;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@@ -58,9 +58,8 @@ public class NetworkTagMappingJsonManager implements NetworkTagMappingManager {
           + " we have to set the configuration:" +
           YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH);
     }
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      networkTagMapping = mapper.readValue(new File(mappingJsonFile),
+      networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile),
           NetworkTagMapping.class);
     } catch (Exception e) {
       throw new YarnRuntimeException(e);

+ 3 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -91,6 +92,7 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_LAYER_MOUNTS
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
 import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
+
 /**
  * <p>This class is an extension of {@link OCIContainerRuntime} that uses the
  * native {@code container-executor} binary via a
@@ -206,7 +208,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
     imageTagToManifestPlugin.init(conf);
     manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
     manifestToResourcesPlugin.init(conf);
-    mapper = new ObjectMapper();
+    mapper = JacksonUtil.createBasicObjectMapper();
     defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);
 
     allowedNetworks.clear();

+ 3 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java

@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 
 import java.io.BufferedReader;
@@ -42,7 +43,6 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,7 +65,6 @@ public class ImageTagToManifestPlugin extends AbstractService
     implements RuncImageTagToManifestPlugin {
 
   private Map<String, ImageManifest> manifestCache;
-  private ObjectMapper objMapper;
   private AtomicReference<Map<String, String>> localImageToHashCache =
       new AtomicReference<>(new HashMap<>());
   private AtomicReference<Map<String, String>> hdfsImageToHashCache =
@@ -107,7 +106,7 @@ public class ImageTagToManifestPlugin extends AbstractService
     }
 
     byte[] bytes = IOUtils.toByteArray(input);
-    manifest = objMapper.readValue(bytes, ImageManifest.class);
+    manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class);
 
     manifestCache.put(hash, manifest);
     return manifest;
@@ -279,7 +278,6 @@ public class ImageTagToManifestPlugin extends AbstractService
         DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
     int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
         DEFAULT_NUM_MANIFESTS_TO_CACHE);
-    this.objMapper = new ObjectMapper();
     this.manifestCache = Collections.synchronizedMap(
         new LRUCache(numManifestsToCache, 0.75f));
 
@@ -315,7 +313,7 @@ public class ImageTagToManifestPlugin extends AbstractService
   }
 
   private static class LRUCache extends LinkedHashMap<String, ImageManifest> {
-    private int cacheSize;
+    private final int cacheSize;
 
     LRUCache(int initialCapacity, float loadFactor) {
       super(initialCapacity, loadFactor, true);
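
Note: the LRUCache touched above is the standard access-ordered LinkedHashMap idiom; the eviction override falls outside this hunk, but such a cache usually takes the following shape (a generic sketch; the removeEldestEntry body is an assumption, not a line from this patch):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class LruCacheSketch<K, V> extends LinkedHashMap<K, V> {
      private final int cacheSize;

      LruCacheSketch(int cacheSize, float loadFactor) {
        // true selects access order, so get() refreshes an entry's recency.
        super(cacheSize, loadFactor, true);
        this.cacheSize = cacheSize;
      }

      @Override
      protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Evict the least recently used entry once capacity is exceeded.
        return size() > cacheSize;
      }
    }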

+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java

@@ -20,10 +20,10 @@ package org.apache.hadoop.yarn.server.resourcemanager.resource;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -105,8 +105,7 @@ public class ResourceProfilesManagerImpl implements ResourceProfilesManager {
         resourcesFile = tmp.getPath();
       }
     }
-    ObjectMapper mapper = new ObjectMapper();
-    Map data = mapper.readValue(new File(resourcesFile), Map.class);
+    Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class);
     Iterator iterator = data.entrySet().iterator();
     while (iterator.hasNext()) {
       Map.Entry entry = (Map.Entry) iterator.next();
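
Note: the patched line deliberately preserves the file's pre-existing raw Map type. For comparison, a TypeReference-based read gives a typed map without unchecked access; this is an illustrative alternative, not part of the commit:

    import java.io.File;
    import java.io.IOException;
    import java.util.Map;
    import com.fasterxml.jackson.core.type.TypeReference;
    import org.apache.hadoop.util.JacksonUtil;

    public final class TypedProfileReader {
      private TypedProfileReader() {
      }

      // A generically typed read; no unchecked casts at the call site.
      public static Map<String, Object> read(File resourcesFile) throws IOException {
        return JacksonUtil.createBasicObjectMapper()
            .readValue(resourcesFile, new TypeReference<Map<String, Object>>() { });
      }
    }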

+ 3 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java

@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions;
@@ -43,7 +44,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.placemen
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 public class MappingRuleCreator {
@@ -58,14 +58,12 @@ public class MappingRuleCreator {
 
   MappingRulesDescription getMappingRulesFromJson(byte[] contents)
       throws IOException {
-    ObjectMapper objectMapper = new ObjectMapper();
-    return objectMapper.readValue(contents, MappingRulesDescription.class);
+    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
   }
 
   MappingRulesDescription getMappingRulesFromJson(String contents)
       throws IOException {
-    ObjectMapper objectMapper = new ObjectMapper();
-    return objectMapper.readValue(contents, MappingRulesDescription.class);
+    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
   }
 
   public List<MappingRule> getMappingRulesFromFile(String jsonPath)
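
Note: both overloads above now funnel through the same shared reader. If further de-duplication were wanted, the String overload could delegate to the byte[] one; a hypothetical sketch (class and method names are invented, not from this patch):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.util.JacksonUtil;

    public final class JsonReadOverloads {
      private JsonReadOverloads() {
      }

      public static <T> T fromJson(byte[] contents, Class<T> type) throws IOException {
        return JacksonUtil.getSharedReader().readValue(contents, type);
      }

      public static <T> T fromJson(String contents, Class<T> type) throws IOException {
        // Delegate so both overloads share one code path.
        return fromJson(contents.getBytes(StandardCharsets.UTF_8), type);
      }
    }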

+ 9 - 6
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java

@@ -21,6 +21,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath;
 
@@ -52,9 +53,11 @@ public class LegacyMappingRuleToJson {
   public static final String JSON_NODE_MATCHES = "matches";
 
   /**
-   * Our internal object mapper, used to create JSON nodes.
+   * It is more performant to reuse ObjectMapper instances, and keeping the
+   * instance private makes it harder for callers to reconfigure it, which
+   * could have unwanted side effects.
    */
-  private ObjectMapper objectMapper = new ObjectMapper();
+  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
 
   /**
    * Collection to store the legacy group mapping rule strings.
@@ -138,8 +141,8 @@ public class LegacyMappingRuleToJson {
    */
   public String convert() {
     //creating the basic JSON config structure
-    ObjectNode rootNode = objectMapper.createObjectNode();
-    ArrayNode rulesNode = objectMapper.createArrayNode();
+    ObjectNode rootNode = OBJECT_MAPPER.createObjectNode();
+    ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode();
     rootNode.set("rules", rulesNode);
 
     //Processing and adding all the user group mapping rules
@@ -158,7 +161,7 @@ public class LegacyMappingRuleToJson {
     }
 
     try {
-      return objectMapper
+      return OBJECT_MAPPER
           .writerWithDefaultPrettyPrinter()
           .writeValueAsString(rootNode);
     } catch (JsonProcessingException e) {
@@ -246,7 +249,7 @@ public class LegacyMappingRuleToJson {
    * @return The object node with the preset fields
    */
   private ObjectNode createDefaultRuleNode(String type) {
-    return objectMapper
+    return OBJECT_MAPPER
         .createObjectNode()
         .put("type", type)
         //All legacy rule fallback to place to default
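
Note: making the mapper static and final does not change tree building, since createObjectNode and createArrayNode only consult the mapper's node factory. A small self-contained sketch of the JSON shape the converter emits (field values are illustrative, and putArray is used in place of the converter's explicit set call):

    import com.fasterxml.jackson.core.JsonProcessingException;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;
    import org.apache.hadoop.util.JacksonUtil;

    public final class RuleTreeSketch {
      private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();

      private RuleTreeSketch() {
      }

      public static String emptyRuleSet() throws JsonProcessingException {
        ObjectNode root = MAPPER.createObjectNode();
        ArrayNode rules = root.putArray("rules"); // same shape as the converter output
        rules.addObject().put("type", "user");    // one placeholder rule
        return MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(root);
      }
    }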

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java

@@ -32,6 +32,7 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -55,7 +56,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.Dom
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -327,14 +327,14 @@ public class FSConfigToCSConfigConverter {
           placementConverter.convertPlacementPolicy(placementManager,
               ruleHandler, capacitySchedulerConfig, usePercentages);
 
-      ObjectMapper mapper = new ObjectMapper();
+      final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
       // close output stream if we write to a file, leave it open otherwise
       if (!consoleMode && rulesToFile) {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true);
       } else {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
       }
-      ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());
+      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
 
       if (consoleMode && rulesToFile) {
         System.out.println("======= " + MAPPING_RULES_JSON + " =======");
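
Note: writerWithDefaultPrettyPrinter() is shorthand for writer(new DefaultPrettyPrinter()), which is why the explicit import can be dropped above. A compact sketch of the console-mode branch (class and method names are illustrative):

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import org.apache.hadoop.util.JacksonUtil;

    public final class PrettyWriterSketch {
      private PrettyWriterSketch() {
      }

      // Writer for console output: pretty-printed, and System.out stays open.
      public static ObjectWriter consoleWriter() {
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
        return mapper.writerWithDefaultPrettyPrinter();
      }
    }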

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.service.ServiceOperations;
 import org.apache.hadoop.ipc.CallerContext;
 import org.apache.hadoop.util.ApplicationClassLoader;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -320,7 +321,7 @@ public class EntityGroupFSTimelineStore extends CompositeService
       }
     }
 
-    objMapper = new ObjectMapper();
+    objMapper = JacksonUtil.createBasicObjectMapper();
     objMapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     jsonFactory = new MappingJsonFactory(objMapper);
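
Note: passing the configured mapper to MappingJsonFactory matters, because parsers and generators created by that factory reuse the mapper as their codec. A condensed, self-contained sketch of the wiring in this hunk (the class name is illustrative; the introspector import assumes the usual jackson-module-jaxb package):

    import com.fasterxml.jackson.databind.MappingJsonFactory;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.type.TypeFactory;
    import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
    import org.apache.hadoop.util.JacksonUtil;

    public final class JaxbJsonFactorySketch {
      private JaxbJsonFactorySketch() {
      }

      public static MappingJsonFactory jaxbAwareFactory() {
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        // Honor JAXB annotations on the (de)serialized classes.
        mapper.setAnnotationIntrospector(
            new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
        // Parsers and generators from this factory reuse the mapper as codec.
        return new MappingJsonFactory(mapper);
      }
    }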

+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java

@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.yarn.server.timeline;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
@@ -298,7 +298,6 @@ public class LevelDBCacheTimelineStore extends KeyValueBasedTimelineStore {
         }
       };
     }
-    static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
     @SuppressWarnings("unchecked")
     private V getEntityForKey(byte[] key) throws IOException {
@@ -306,7 +305,7 @@ public class LevelDBCacheTimelineStore extends KeyValueBasedTimelineStore {
       if (resultRaw == null) {
         return null;
       }
-      return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class);
+      return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class);
     }
 
     private byte[] getStartTimeKey(K entityId) {

+ 4 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.server.timeline;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -31,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@@ -108,7 +108,7 @@ public class PluginStoreTestUtils {
   }
 
   static ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = new ObjectMapper();
+    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -230,10 +230,9 @@ public class PluginStoreTestUtils {
   static void writeEntities(TimelineEntities entities, Path logPath,
       FileSystem fs) throws IOException {
     FSDataOutputStream outStream = createLogFile(logPath, fs);
-    JsonGenerator jsonGenerator
-        = new JsonFactory().createGenerator((OutputStream)outStream);
-    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     ObjectMapper objMapper = createObjectMapper();
+    JsonGenerator jsonGenerator = objMapper.createGenerator((OutputStream)outStream);
+    jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     for (TimelineEntity entity : entities.getEntities()) {
       objMapper.writeValue(jsonGenerator, entity);
     }
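
Note: creating the generator from the already-configured mapper, rather than from a bare JsonFactory, keeps the generator tied to the mapper's settings; that is the point of reordering the lines above. A self-contained sketch of the newline-delimited write loop (names are illustrative):

    import java.io.IOException;
    import java.io.OutputStream;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.hadoop.util.JacksonUtil;

    public final class NdjsonWriterSketch {
      private NdjsonWriterSketch() {
      }

      // Writes each value on its own line, as the test utility does above.
      public static void writeAll(OutputStream out, Iterable<?> values) throws IOException {
        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
        JsonGenerator generator = mapper.createGenerator(out);
        generator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
        for (Object value : values) {
          mapper.writeValue(generator, value);
        }
        generator.close();
      }
    }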

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java

@@ -23,6 +23,7 @@ import java.io.IOException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A simple util class for Json SerDe.
@@ -31,7 +32,7 @@ public final class JsonUtils {
 
   private JsonUtils(){}
 
-  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
 
   static {
     OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java

@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@@ -104,11 +105,10 @@ public class FileSystemTimelineReaderImpl extends AbstractService
     return rootPath.toString();
   }
 
-  private static ObjectMapper mapper;
+  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
 
   static {
-    mapper = new ObjectMapper();
-    YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
+    YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
   }
 
   /**
@@ -127,7 +127,7 @@ public class FileSystemTimelineReaderImpl extends AbstractService
   public static <T> T getTimelineRecordFromJSON(
       String jsonString, Class<T> clazz)
       throws JsonGenerationException, JsonMappingException, IOException {
-    return mapper.readValue(jsonString, clazz);
+    return OBJECT_MAPPER.readValue(jsonString, clazz);
   }
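
Note: the static-initializer pattern above configures the mapper exactly once, before any reader thread can observe it. A minimal sketch (the class name is illustrative; the provider import assumes its usual org.apache.hadoop.yarn.webapp package):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.hadoop.util.JacksonUtil;
    import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;

    public final class StaticMapperSketch {
      // Created and configured once, before any other thread can observe it.
      private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();

      static {
        YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
      }

      private StaticMapperSketch() {
      }
    }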
 
   private static void fillFields(TimelineEntity finalEntity,