
Revert "HADOOP-19231. Add JacksonUtil to manage Jackson classes (#6953)"

This reverts commit fa9bb0d1ac4b27a37ba9df0ee3e1104f1cd85e64.
Ayush Saxena, 8 months ago
parent commit 0837c84a9f
71 changed files with 296 additions and 392 deletions
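
Every hunk below applies the same mechanical substitution: call sites that had gone through the shared JacksonUtil helpers return to constructing their own Jackson objects. A minimal before/after sketch of the pattern, assuming a hypothetical Example bean in place of the real Hadoop types:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectReader;
    import com.fasterxml.jackson.databind.ObjectWriter;

    public class RevertPattern {
      // Hypothetical bean standing in for the real Hadoop types.
      public static class Example { public int value; }

      // Before the revert (see the deleted JacksonUtil.java below):
      //   private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
      //   private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(Example.class);

      // After the revert: each class builds its own writer, reader, or factory.
      private static final ObjectWriter WRITER = new ObjectMapper().writer();
      private static final ObjectReader READER = new ObjectMapper().readerFor(Example.class);

      public static void main(String[] args) throws Exception {
        Example in = new Example();
        in.value = 42;
        String json = WRITER.writeValueAsString(in);   // {"value":42}
        Example out = READER.readValue(json);
        System.out.println(json + " -> " + out.value); // {"value":42} -> 42
      }
    }
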
  1. +5 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
  2. +3 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
  3. +2 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
  4. +3 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
  5. +8 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
  6. +3 -2  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
  7. +2 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
  8. +0 -123  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java
  9. +6 -3  hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java
  10. +4 -2  hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java
  11. +4 -3  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java
  12. +6 -6  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java
  13. +9 -5  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
  14. +4 -2  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
  15. +2 -2  hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
  16. +1 -2  hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
  17. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java
  18. +2 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java
  19. +5 -4  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
  20. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
  21. +3 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java
  22. +4 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java
  23. +3 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java
  24. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java
  25. +5 -3  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java
  26. +3 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java
  27. +2 -2  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
  28. +12 -12  hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
  29. +3 -2  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java
  30. +1 -2  hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java
  31. +3 -3  hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java
  32. +2 -2  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
  33. +4 -4  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java
  34. +5 -5  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java
  35. +5 -3  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
  36. +4 -3  hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
  37. +2 -3  hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java
  38. +7 -4  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java
  39. +4 -5  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java
  40. +2 -3  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java
  41. +6 -5  hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
  42. +3 -1  hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java
  43. +7 -3  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java
  44. +4 -4  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java
  45. +1 -3  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
  46. +7 -5  hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java
  47. +2 -3  hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java
  48. +28 -26  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java
  49. +15 -19  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java
  50. +1 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java
  51. +2 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
  52. +3 -14  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
  53. +4 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
  54. +1 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
  55. +5 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
  56. +5 -5  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java
  57. +3 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java
  58. +9 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java
  59. +1 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
  60. +3 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java
  61. +1 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
  62. +5 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
  63. +3 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
  64. +5 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java
  65. +6 -9  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java
  66. +3 -3  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
  67. +1 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java
  68. +3 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java
  69. +5 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java
  70. +1 -2  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java
  71. +4 -4  hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java

+ 5 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

@@ -22,6 +22,7 @@ import com.ctc.wstx.api.ReaderConfig;
 import com.ctc.wstx.io.StreamBootstrapper;
 import com.ctc.wstx.io.SystemId;
 import com.ctc.wstx.stax.WstxInputFactory;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 
 import java.io.BufferedInputStream;
@@ -100,7 +101,6 @@ import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 import org.apache.hadoop.util.ConfigurationHelper;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringInterner;
@@ -3792,7 +3792,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       throw new IllegalArgumentException("Property " +
           propertyName + " not found");
     } else {
-      JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+      JsonFactory dumpFactory = new JsonFactory();
+      JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
       dumpGenerator.writeStartObject();
       dumpGenerator.writeFieldName("property");
       appendJSONProperty(dumpGenerator, config, propertyName,
@@ -3830,7 +3831,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
     dumpGenerator.writeStartObject();
     dumpGenerator.writeFieldName("properties");
     dumpGenerator.writeStartArray();

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java

@@ -42,7 +42,6 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdenti
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
@@ -80,6 +79,7 @@ import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -595,10 +595,11 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
         && conn.getContentType().trim().toLowerCase()
             .startsWith(APPLICATION_JSON_MIME)
         && klass != null) {
+      ObjectMapper mapper = new ObjectMapper();
       InputStream is = null;
       try {
         is = conn.getInputStream();
-        ret = JacksonUtil.getSharedReader().readValue(is, klass);
+        ret = mapper.readValue(is, klass);
       } finally {
         IOUtils.closeStream(is);
       }

+ 2 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java

@@ -38,10 +38,10 @@ import java.util.concurrent.atomic.AtomicReference;
 
 import javax.management.ObjectName;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AtomicDoubleArray;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -146,7 +146,7 @@ public class DecayRpcScheduler implements RpcScheduler,
   public static final Logger LOG =
       LoggerFactory.getLogger(DecayRpcScheduler.class);
 
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
 
   // Track the decayed and raw (no decay) number of calls for each schedulable
   // identity from all previous decay windows: idx 0 for decayed call cost and

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

@@ -121,7 +121,6 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
@@ -131,6 +130,7 @@ import org.apache.hadoop.tracing.SpanContext;
 import org.apache.hadoop.tracing.TraceScope;
 import org.apache.hadoop.tracing.Tracer;
 import org.apache.hadoop.tracing.TraceUtils;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -3843,8 +3843,9 @@ public abstract class Server {
    * @return Get the NumOpenConnections/User.
    */
   public String getNumOpenConnectionsPerUser() {
+    ObjectMapper mapper = new ObjectMapper();
     try {
-      return JacksonUtil.getSharedWriter()
+      return mapper
           .writeValueAsString(connectionManager.getUserToConnectionsMap());
     } catch (IOException ignored) {
     }

+ 8 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java

@@ -43,13 +43,13 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.http.HttpServer2;
-import org.apache.hadoop.util.JacksonUtil;
 
 /*
  * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has
@@ -134,6 +134,11 @@ public class JMXJsonServlet extends HttpServlet {
    */
   protected transient MBeanServer mBeanServer;
 
+  /**
+   * Json Factory to create Json generators for write objects in json format
+   */
+  protected transient JsonFactory jsonFactory;
+
   /**
    * Initialize this servlet.
    */
@@ -141,6 +146,7 @@ public class JMXJsonServlet extends HttpServlet {
   public void init() throws ServletException {
     // Retrieve the MBean server
     mBeanServer = ManagementFactory.getPlatformMBeanServer();
+    jsonFactory = new JsonFactory();
   }
 
   protected boolean isInstrumentationAccessAllowed(HttpServletRequest request, 
@@ -181,7 +187,7 @@ public class JMXJsonServlet extends HttpServlet {
         response.setHeader(ACCESS_CONTROL_ALLOW_METHODS, "GET");
         response.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
 
-        jg = JacksonUtil.getSharedWriter().createGenerator(writer);
+        jg = jsonFactory.createGenerator(writer);
         jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
         jg.useDefaultPrettyPrinter();
         jg.writeStartObject();

+ 3 - 2
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java

@@ -21,8 +21,8 @@ package org.apache.hadoop.metrics2;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.JacksonUtil;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,7 +46,8 @@ public class MetricsJsonBuilder extends MetricsRecordBuilder {
   private final MetricsCollector parent;
   private Map<String, Object> innerMetrics = new LinkedHashMap<>();
 
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writer();
 
   /**
    * Build an instance.

+ 2 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java

@@ -46,7 +46,6 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -166,7 +165,7 @@ public abstract class DelegationTokenAuthenticationHandler
   @VisibleForTesting
   public void initJsonFactory(Properties config) {
     boolean hasFeature = false;
-    JsonFactory tmpJsonFactory = JacksonUtil.createBasicJsonFactory();
+    JsonFactory tmpJsonFactory = new JsonFactory();
 
     for (Map.Entry entry : config.entrySet()) {
       String key = (String)entry.getKey();
@@ -336,7 +335,7 @@ public abstract class DelegationTokenAuthenticationHandler
             if (map != null) {
               response.setContentType(MediaType.APPLICATION_JSON);
               Writer writer = response.getWriter();
-              ObjectMapper jsonMapper = JacksonUtil.createObjectMapper(jsonFactory);
+              ObjectMapper jsonMapper = new ObjectMapper(jsonFactory);
               jsonMapper.writeValue(writer, map);
               writer.write(ENTER);
               writer.flush();

+ 0 - 123
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JacksonUtil.java

@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.util;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.fasterxml.jackson.databind.json.JsonMapper;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-
-/**
- * Utility for sharing code related to Jackson usage in Hadoop.
- */
-@Private
-public final class JacksonUtil {
-
-  private static final ObjectMapper SHARED_BASIC_OBJECT_MAPPER = createBasicObjectMapper();
-  private static final ObjectReader SHARED_BASIC_OBJECT_READER =
-      SHARED_BASIC_OBJECT_MAPPER.reader();
-  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER =
-      SHARED_BASIC_OBJECT_MAPPER.writer();
-  private static final ObjectWriter SHARED_BASIC_OBJECT_WRITER_PRETTY =
-      SHARED_BASIC_OBJECT_MAPPER.writerWithDefaultPrettyPrinter();
-
-  /**
-   * Creates a new {@link JsonFactory} instance with basic configuration.
-   *
-   * @return an {@link JsonFactory} with basic configuration
-   */
-  public static JsonFactory createBasicJsonFactory() {
-    // deliberately return a new instance instead of sharing one because we can't trust
-    // that users won't modify this instance
-    return new JsonFactory();
-  }
-
-  /**
-   * Creates a new {@link ObjectMapper} instance with basic configuration.
-   *
-   * @return an {@link ObjectMapper} with basic configuration
-   */
-  public static ObjectMapper createBasicObjectMapper() {
-    // deliberately return a new instance instead of sharing one because we can't trust
-    // that users won't modify this instance
-    return JsonMapper.builder(createBasicJsonFactory()).build();
-  }
-
-  /**
-   * Creates a new {@link ObjectMapper} instance based on the configuration
-   * in the input {@link JsonFactory}.
-   *
-   * @param jsonFactory a pre-configured {@link JsonFactory}
-   * @return an {@link ObjectMapper} with configuration set by the input {@link JsonFactory}.
-   */
-  public static ObjectMapper createObjectMapper(final JsonFactory jsonFactory) {
-    return JsonMapper.builder(jsonFactory).build();
-  }
-
-  /**
-   * Returns a shared {@link ObjectReader} instance with basic configuration.
-   *
-   * @return a shared {@link ObjectReader} instance with basic configuration
-   */
-  public static ObjectReader getSharedReader() {
-    return SHARED_BASIC_OBJECT_READER;
-  }
-
-  /**
-   * Returns an {@link ObjectReader} for the given type instance with basic configuration.
-   *
-   * @param type the class that the reader has to support
-   * @return an {@link ObjectReader} instance with basic configuration
-   */
-  public static ObjectReader createBasicReaderFor(Class<?> type) {
-    return SHARED_BASIC_OBJECT_MAPPER.readerFor(type);
-  }
-
-  /**
-   * Returns a shared {@link ObjectWriter} instance with basic configuration.
-   *
-   * @return a shared {@link ObjectWriter} instance with basic configuration
-   */
-  public static ObjectWriter getSharedWriter() {
-    return SHARED_BASIC_OBJECT_WRITER;
-  }
-
-  /**
-   * Returns a shared {@link ObjectWriter} instance with pretty print and basic configuration.
-   *
-   * @return a shared {@link ObjectWriter} instance with pretty print and basic configuration
-   */
-  public static ObjectWriter getSharedWriterWithPrettyPrint() {
-    return SHARED_BASIC_OBJECT_WRITER_PRETTY;
-  }
-
-  /**
-   * Returns an {@link ObjectWriter} for the given type instance with basic configuration.
-   *
-   * @param type the class that the writer has to support
-   * @return an {@link ObjectWriter} instance with basic configuration
-   */
-  public static ObjectWriter createBasicWriterFor(Class<?> type) {
-    return SHARED_BASIC_OBJECT_MAPPER.writerFor(type);
-  }
-
-  private JacksonUtil() {}
-}
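
The deleted class's comments spell out its sharing rule: JsonFactory and ObjectMapper are mutable, so JacksonUtil always handed out fresh instances of those, while ObjectReader and ObjectWriter are immutable in Jackson 2.x and were safe to share across threads. A short sketch of that distinction, illustrative only and not part of the patch:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;
    import com.fasterxml.jackson.databind.SerializationFeature;
    import java.util.Collections;

    public class SharingRules {
      public static void main(String[] args) throws Exception {
        Object value = Collections.singletonMap("k", 1);

        // ObjectMapper is mutable: enabling a feature on a shared instance
        // would leak the setting into every other caller.
        ObjectMapper mapper = new ObjectMapper();
        mapper.enable(SerializationFeature.INDENT_OUTPUT);
        System.out.println(mapper.writeValueAsString(value)); // indented

        // ObjectWriter is immutable: withDefaultPrettyPrinter() returns a new
        // instance and leaves the original writer untouched.
        ObjectWriter shared = new ObjectMapper().writer();
        ObjectWriter pretty = shared.withDefaultPrettyPrinter();
        System.out.println(shared.writeValueAsString(value)); // {"k":1}
        System.out.println(pretty.writeValueAsString(value)); // indented
      }
    }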

+ 6 - 3
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JsonSerialization.java

@@ -76,8 +76,11 @@ public class JsonSerialization<T> {
   private final Class<T> classType;
   private final ObjectMapper mapper;
 
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
-  private static final ObjectReader MAP_READER = JacksonUtil.createBasicReaderFor(Map.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+
+  private static final ObjectReader MAP_READER =
+      new ObjectMapper().readerFor(Map.class);
 
   /**
    * @return an ObjectWriter which pretty-prints its output
@@ -103,7 +106,7 @@ public class JsonSerialization<T> {
       boolean failOnUnknownProperties, boolean pretty) {
     Preconditions.checkArgument(classType != null, "null classType");
     this.classType = classType;
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
         failOnUnknownProperties);
     mapper.configure(SerializationFeature.INDENT_OUTPUT, pretty);

+ 4 - 2
hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONReader.java

@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.util.JacksonUtil;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.WebApplicationException;
@@ -37,6 +38,7 @@ import java.util.Map;
 @Consumes(MediaType.APPLICATION_JSON)
 @InterfaceAudience.Private
 public class KMSJSONReader implements MessageBodyReader<Object> {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
 
   @Override
   public boolean isReadable(Class<?> type, Type genericType,
@@ -50,6 +52,6 @@ public class KMSJSONReader implements MessageBodyReader<Object> {
       Annotation[] annotations, MediaType mediaType,
       MultivaluedMap<String, String> httpHeaders, InputStream entityStream)
       throws IOException, WebApplicationException {
-    return JacksonUtil.getSharedReader().readValue(entityStream, type);
+    return MAPPER.readValue(entityStream, type);
   }
 }

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkItem.java

@@ -20,8 +20,8 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -35,8 +35,9 @@ import java.io.IOException;
 @InterfaceStability.Unstable
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 public class DiskBalancerWorkItem {
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerWorkItem.class);
+      new ObjectMapper().readerFor(DiskBalancerWorkItem.class);
 
   private  long startTime;
   private long secondsElapsed;
@@ -172,7 +173,7 @@ public class DiskBalancerWorkItem {
    * @throws IOException
    */
   public String toJson() throws IOException {
-    return JacksonUtil.getSharedWriter().writeValueAsString(this);
+    return MAPPER.writeValueAsString(this);
   }
 
   /**

+ 6 - 6
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancerWorkStatus.java

@@ -23,7 +23,6 @@ package org.apache.hadoop.hdfs.server.datanode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.SerializationFeature;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -40,13 +39,14 @@ import static com.fasterxml.jackson.databind.type.TypeFactory.defaultInstance;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskBalancerWorkStatus {
-  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final ObjectMapper MAPPER_WITH_INDENT_OUTPUT =
-      JacksonUtil.createBasicObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
+      new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT);
   private static final ObjectReader READER_WORKSTATUS =
-      MAPPER.readerFor(DiskBalancerWorkStatus.class);
-  private static final ObjectReader READER_WORKENTRY = MAPPER.readerFor(
-      defaultInstance().constructCollectionType(List.class, DiskBalancerWorkEntry.class));
+      new ObjectMapper().readerFor(DiskBalancerWorkStatus.class);
+  private static final ObjectReader READER_WORKENTRY = new ObjectMapper()
+      .readerFor(defaultInstance().constructCollectionType(List.class,
+          DiskBalancerWorkEntry.class));
 
   private final List<DiskBalancerWorkEntry> currentState;
   private Result result;

+ 9 - 5
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java

@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hdfs.util;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import java.io.File;
@@ -40,7 +42,6 @@ import java.util.concurrent.TimeoutException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
-import org.apache.hadoop.util.JacksonUtil;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -82,6 +83,7 @@ public final class CombinedHostsFileReader {
   public static DatanodeAdminProperties[]
       readFile(final String hostsFilePath) throws IOException {
     DatanodeAdminProperties[] allDNs = new DatanodeAdminProperties[0];
+    ObjectMapper objectMapper = new ObjectMapper();
     File hostFile = new File(hostsFilePath);
     boolean tryOldFormat = false;
 
@@ -89,8 +91,7 @@ public final class CombinedHostsFileReader {
       try (Reader input =
           new InputStreamReader(
               Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
-        allDNs = JacksonUtil.getSharedReader()
-            .readValue(input, DatanodeAdminProperties[].class);
+        allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
       } catch (JsonMappingException jme) {
         // The old format doesn't have json top-level token to enclose
         // the array.
@@ -102,12 +103,15 @@ public final class CombinedHostsFileReader {
     }
 
     if (tryOldFormat) {
-      ObjectReader objectReader = JacksonUtil.createBasicReaderFor(DatanodeAdminProperties.class);
+      ObjectReader objectReader =
+          objectMapper.readerFor(DatanodeAdminProperties.class);
+      JsonFactory jsonFactory = new JsonFactory();
       List<DatanodeAdminProperties> all = new ArrayList<>();
       try (Reader input =
           new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
                   StandardCharsets.UTF_8)) {
-        Iterator<DatanodeAdminProperties> iterator = objectReader.readValues(input);
+        Iterator<DatanodeAdminProperties> iterator =
+            objectReader.readValues(jsonFactory.createParser(input));
         while (iterator.hasNext()) {
           DatanodeAdminProperties properties = iterator.next();
           all.add(properties);
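
The old-format branch above reads a file of bare, concatenated JSON objects with no enclosing array, so the reader iterates root-level values one at a time; after the revert it builds the parser from a fresh JsonFactory instead of passing the Reader directly. A minimal sketch of that parse path, assuming a hypothetical Host bean in place of DatanodeAdminProperties:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.databind.MappingIterator;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.StringReader;

    public class OldFormatSketch {
      // Hypothetical stand-in for DatanodeAdminProperties.
      public static class Host { public String hostName; }

      public static void main(String[] args) throws Exception {
        // Old format: concatenated objects, no top-level [ ] array token.
        String oldFormat = "{\"hostName\":\"dn1\"} {\"hostName\":\"dn2\"}";
        MappingIterator<Host> iterator = new ObjectMapper().readerFor(Host.class)
            .readValues(new JsonFactory().createParser(new StringReader(oldFormat)));
        while (iterator.hasNext()) {
          System.out.println(iterator.next().hostName); // dn1, then dn2
        }
      }
    }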

+ 4 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java

@@ -26,11 +26,11 @@ import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Set;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Writer support for JSON-based datanode configuration, an alternative format
@@ -59,10 +59,12 @@ public final class CombinedHostsFileWriter {
    */
   public static void writeFile(final String hostsFile,
       final Set<DatanodeAdminProperties> allDNs) throws IOException {
+    final ObjectMapper objectMapper = new ObjectMapper();
+
     try (Writer output =
         new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
             StandardCharsets.UTF_8)) {
-      JacksonUtil.getSharedWriter().writeValue(output, allDNs);
+      objectMapper.writeValue(output, allDNs);
     }
   }
 }

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java

@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.fs.ContentSummary;
@@ -654,7 +654,7 @@ public class JsonUtilClient {
     }
 
     final String namesInJson = (String) json.get("XAttrNames");
-    ObjectReader reader = JacksonUtil.createBasicReaderFor(List.class);
+    ObjectReader reader = new ObjectMapper().readerFor(List.class);
     final List<Object> xattrs = reader.readValue(namesInJson);
     final List<String> names =
         Lists.newArrayListWithCapacity(json.keySet().size());

+ 1 - 2
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java

@@ -71,7 +71,6 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthentica
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
 import org.apache.hadoop.util.HttpExceptionUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1819,7 +1818,7 @@ public class HttpFSFileSystem extends FileSystem
 
   @VisibleForTesting
   static BlockLocation[] toBlockLocations(JSONObject json) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     MapType subType = mapper.getTypeFactory().constructMapType(Map.class,
         String.class, BlockLocation[].class);
     MapType rootType = mapper.getTypeFactory().constructMapType(Map.class,

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowDiskTracker.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
@@ -31,7 +32,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports.DiskOp;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
@@ -71,7 +71,7 @@ public class SlowDiskTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
 
   /**
    * Number of disks to include in JSON report per operation. We will return

+ 2 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/SlowPeerTracker.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
@@ -29,7 +30,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.protocol.OutlierMetrics;
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,8 +75,7 @@ public class SlowPeerTracker {
   /**
    * ObjectWriter to convert JSON reports to String.
    */
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriter();
-
+  private static final ObjectWriter WRITER = new ObjectMapper().writer();
   /**
    * Number of nodes to include in JSON report. We will return nodes with
    * the highest number of votes from peers.

+ 5 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java

@@ -79,18 +79,18 @@ import org.apache.hadoop.util.CloseableReferenceCount;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
-import org.apache.hadoop.util.Preconditions;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -103,9 +103,10 @@ import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFact
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
-  private static final ObjectWriter WRITER = JacksonUtil.getSharedWriterWithPrettyPrint();
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(BlockIteratorState.class);
+      new ObjectMapper().readerFor(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java

@@ -32,6 +32,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -59,7 +60,6 @@ import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdfs.server.datanode.checker.VolumeCheckResult;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
@@ -369,7 +369,7 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
   }
 
   private static final ObjectWriter WRITER =
-      JacksonUtil.getSharedWriterWithPrettyPrint();
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
 
   private static class ProvidedBlockIteratorState {
     ProvidedBlockIteratorState() {

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/Command.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs.server.diskbalancer.command;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.commons.cli.CommandLine;
@@ -46,7 +47,6 @@ import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.HostsFileReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,7 +77,8 @@ import java.util.TreeSet;
  * Common interface for command handling.
  */
 public abstract class Command extends Configured implements Closeable {
-  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(HashMap.class);
+  private static final ObjectReader READER =
+      new ObjectMapper().readerFor(HashMap.class);
   static final Logger LOG = LoggerFactory.getLogger(Command.class);
   private Map<String, String> validArgs = new HashMap<>();
   private URI clusterURI;

+ 4 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/connectors/JsonNodeConnector.java

@@ -17,14 +17,15 @@
 
 package org.apache.hadoop.hdfs.server.diskbalancer.connectors;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
-import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
+import org.apache.hadoop.hdfs.server.diskbalancer.datamodel
+    .DiskBalancerDataNode;
 
 import java.io.File;
 import java.net.URL;
@@ -37,7 +38,7 @@ public class JsonNodeConnector implements ClusterConnector {
   private static final Logger LOG =
       LoggerFactory.getLogger(JsonNodeConnector.class);
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
+      new ObjectMapper().readerFor(DiskBalancerCluster.class);
   private final URL clusterURI;
 
   /**

+ 3 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerCluster.java

@@ -19,7 +19,9 @@ package org.apache.hadoop.hdfs.server.diskbalancer.datamodel;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -29,8 +31,6 @@ import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.Planner;
 import org.apache.hadoop.hdfs.server.diskbalancer.planner.PlannerFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
-import org.apache.hadoop.util.JacksonUtil;
-import org.apache.hadoop.util.Preconditions;
 
 import java.io.File;
 import java.io.IOException;
@@ -73,7 +73,7 @@ public class DiskBalancerCluster {
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerCluster.class);
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerCluster.class);
+      new ObjectMapper().readerFor(DiskBalancerCluster.class);
   private final Set<String> exclusionList;
   private final Set<String> inclusionList;
   private ClusterConnector clusterConnector;

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/datamodel/DiskBalancerVolume.java

@@ -19,10 +19,10 @@ package org.apache.hadoop.hdfs.server.diskbalancer.datamodel;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 
 import org.apache.hadoop.hdfs.web.JsonUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,7 +34,7 @@ import java.io.IOException;
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class DiskBalancerVolume {
   private static final ObjectReader READER =
-      JacksonUtil.createBasicReaderFor(DiskBalancerVolume.class);
+      new ObjectMapper().readerFor(DiskBalancerVolume.class);
 
   private static final Logger LOG =
       LoggerFactory.getLogger(DiskBalancerVolume.class);

+ 5 - 3
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/planner/NodePlan.java

@@ -18,9 +18,9 @@
 package org.apache.hadoop.hdfs.server.diskbalancer.planner;
 
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Preconditions;
 
 import java.io.IOException;
@@ -39,8 +39,10 @@ public class NodePlan {
   private int port;
   private long timeStamp;
 
-  private static final ObjectReader READER = JacksonUtil.createBasicReaderFor(NodePlan.class);
-  private static final ObjectWriter WRITER = JacksonUtil.createBasicWriterFor(NodePlan.class);
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectReader READER = MAPPER.readerFor(NodePlan.class);
+  private static final ObjectWriter WRITER = MAPPER.writerFor(
+      MAPPER.constructType(NodePlan.class));
   /**
    * returns timestamp when this plan was created.
    *

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NetworkTopologyServlet.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -25,7 +26,6 @@ import org.apache.hadoop.net.Node;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.ServletContext;
@@ -123,7 +123,8 @@ public class NetworkTopologyServlet extends DfsServlet {
 
   protected void printJsonFormat(PrintStream stream, Map<String,
       TreeSet<String>> tree, ArrayList<String> racks) throws IOException {
-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(stream);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(stream);
     dumpGenerator.writeStartArray();
 
     for(String r : racks) {

+ 2 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java

@@ -21,6 +21,7 @@ import java.io.IOException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Phase;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
@@ -28,7 +29,6 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgressVie
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.Step;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
@@ -61,7 +61,7 @@ public class StartupProgressServlet extends DfsServlet {
     StartupProgress prog = NameNodeHttpServer.getStartupProgressFromContext(
       getServletContext());
     StartupProgressView view = prog.createView();
-    JsonGenerator json = JacksonUtil.getSharedWriter().createGenerator(resp.getWriter());
+    JsonGenerator json = new JsonFactory().createGenerator(resp.getWriter());
     try {
       json.writeStartObject();
       json.writeNumberField(ELAPSED_TIME, view.getElapsedTime());

+ 12 - 12
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.web;
 
-import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -39,12 +38,13 @@ import org.apache.hadoop.hdfs.protocol.*;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Lists;
 import org.apache.hadoop.util.StringUtils;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import java.io.IOException;
 import java.util.*;
 
@@ -52,11 +52,11 @@ import java.util.*;
 public class JsonUtil {
   private static final Object[] EMPTY_OBJECT_ARRAY = {};
 
-  // Reuse ObjectWriter instance for improving performance.
-  // ObjectWriter is thread safe as long as we always configure instance
+  // Reuse ObjectMapper instance for improving performance.
+  // ObjectMapper is thread safe as long as we always configure instance
   // before use. We don't have a re-entrant call pattern in WebHDFS,
   // so we just need to worry about thread-safety.
-  private static final ObjectWriter SHARED_WRITER = JacksonUtil.getSharedWriter();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
 
   /** Convert a token object to a Json string. */
   public static String toJsonString(final Token<? extends TokenIdentifier> token
@@ -93,7 +93,7 @@ public class JsonUtil {
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put(key, value);
     try {
-      return SHARED_WRITER.writeValueAsString(m);
+      return MAPPER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -113,7 +113,7 @@ public class JsonUtil {
     final Map<String, Object> m = toJsonMap(status);
     try {
       return includeType ?
-          toJsonString(FileStatus.class, m) : SHARED_WRITER.writeValueAsString(m);
+          toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m);
     } catch (IOException ignored) {
     }
     return null;
@@ -453,7 +453,7 @@ public class JsonUtil {
     finalMap.put(AclStatus.class.getSimpleName(), m);
 
     try {
-      return SHARED_WRITER.writeValueAsString(finalMap);
+      return MAPPER.writeValueAsString(finalMap);
     } catch (IOException ignored) {
     }
     return null;
@@ -491,7 +491,7 @@ public class JsonUtil {
       final XAttrCodec encoding) throws IOException {
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
-    return SHARED_WRITER.writeValueAsString(finalMap);
+    return MAPPER.writeValueAsString(finalMap);
   }
   
   public static String toJsonString(final List<XAttr> xAttrs)
@@ -500,14 +500,14 @@ public class JsonUtil {
     for (XAttr xAttr : xAttrs) {
       names.add(XAttrHelper.getPrefixedName(xAttr));
     }
-    String ret = SHARED_WRITER.writeValueAsString(names);
+    String ret = MAPPER.writeValueAsString(names);
     final Map<String, Object> finalMap = new TreeMap<String, Object>();
     finalMap.put("XAttrNames", ret);
-    return SHARED_WRITER.writeValueAsString(finalMap);
+    return MAPPER.writeValueAsString(finalMap);
   }
 
   public static String toJsonString(Object obj) throws IOException {
-    return SHARED_WRITER.writeValueAsString(obj);
+    return MAPPER.writeValueAsString(obj);
   }
 
   public static String toJsonString(BlockStoragePolicy[] storagePolicies) {

+ 3 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/QueueManager.java

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.mapred;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +28,6 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.QueueState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -531,7 +531,8 @@ public class QueueManager {
       return;
     }
     
-    JsonGenerator dumpGenerator = JacksonUtil.getSharedWriter().createGenerator(out);
+    JsonFactory dumpFactory = new JsonFactory();
+    JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
     QueueConfigurationParser parser;
     boolean aclsEnabled = false;
     if (conf != null) {

+ 1 - 2
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/JobHistoryEventUtils.java

@@ -28,7 +28,6 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 
 /**
@@ -42,7 +41,7 @@ public final class JobHistoryEventUtils {
   public static final int ATS_CONFIG_PUBLISH_SIZE_BYTES = 10 * 1024;
 
   public static JsonNode countersToJSON(Counters counters) {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     ArrayNode nodes = mapper.createArrayNode();
     if (counters != null) {
       for (CounterGroup counterGroup : counters) {

+ 3 - 3
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AEncryption.java

@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -29,7 +30,6 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3a.S3AUtils;
-import org.apache.hadoop.util.JacksonUtil;
 
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;
 
@@ -91,8 +91,8 @@ public final class S3AEncryption {
       if (encryptionContextMap.isEmpty()) {
         return "";
       }
-      final String encryptionContextJson = JacksonUtil.getSharedWriter()
-          .writeValueAsString(encryptionContextMap);
+      final String encryptionContextJson = new ObjectMapper().writeValueAsString(
+          encryptionContextMap);
       return Base64.encodeBase64String(encryptionContextJson.getBytes(StandardCharsets.UTF_8));
     } catch (IOException e) {
       if (propagateExceptions) {
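A sketch of what the restored code computes: the encryption context map serialized to JSON, then Base64-encoded. This sketch uses java.util.Base64 rather than the commons-codec class the real file imports, and the method name is an assumption:

import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;

public final class EncryptionContextSketch {
  public static String encodeContext(Map<String, String> context)
      throws IOException {
    String json = new ObjectMapper().writeValueAsString(context);
    return Base64.getEncoder()
        .encodeToString(json.getBytes(StandardCharsets.UTF_8));
  }
}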

+ 2 - 2
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java

@@ -84,7 +84,6 @@ import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.LambdaUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
@@ -97,6 +96,7 @@ import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_ST
 import static org.apache.hadoop.fs.azure.NativeAzureFileSystemHelper.*;
 import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import com.microsoft.azure.storage.StorageException;
 
@@ -127,7 +127,7 @@ public class NativeAzureFileSystem extends FileSystem {
     private static final int FORMATTING_BUFFER = 10000;
     private boolean committed;
     public static final String SUFFIX = "-RenamePending.json";
-    private static final ObjectReader READER = JacksonUtil.createBasicObjectMapper()
+    private static final ObjectReader READER = new ObjectMapper()
         .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
         .readerFor(JsonNode.class);
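The hunk above restores a statically held ObjectReader with unquoted field names allowed. A self-contained sketch of that lenient-read behaviour; the JSON key is made up for illustration:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;

public class LenientReaderSketch {
  private static final ObjectReader READER = new ObjectMapper()
      .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
      .readerFor(JsonNode.class);

  public static void main(String[] args) throws Exception {
    // The unquoted key parses only because of the feature enabled above.
    JsonNode node = READER.readValue("{oldFolderName: \"src\"}");
    System.out.println(node.get("oldFolderName").asText()); // src
  }
}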
 

+ 4 - 4
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteSASKeyGeneratorImpl.java

@@ -24,11 +24,11 @@ import java.net.URISyntaxException;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
+import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 
 import org.apache.http.NameValuePair;
@@ -40,7 +40,7 @@ import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
 
@@ -53,8 +53,8 @@ public class RemoteSASKeyGeneratorImpl extends SASKeyGeneratorImpl {
 
   public static final Logger LOG =
       LoggerFactory.getLogger(AzureNativeFileSystemStore.class);
-  private static final ObjectReader RESPONSE_READER = JacksonUtil
-      .createBasicReaderFor(RemoteSASKeyGenerationResponse.class);
+  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
+      .readerFor(RemoteSASKeyGenerationResponse.class);
 
   /**
    * Configuration parameter name expected in the Configuration

+ 5 - 5
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/RemoteWasbAuthorizerImpl.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.azure;
 
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.commons.lang3.StringUtils;
@@ -28,14 +29,13 @@ import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import java.util.concurrent.TimeUnit;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
 import static org.apache.hadoop.fs.azure.WasbRemoteCallHelper.REMOTE_CALL_SUCCESS_CODE;
 
@@ -49,8 +49,8 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
 
   public static final Logger LOG = LoggerFactory
       .getLogger(RemoteWasbAuthorizerImpl.class);
-  private static final ObjectReader RESPONSE_READER = JacksonUtil
-      .createBasicReaderFor(RemoteWasbAuthorizerResponse.class);
+  private static final ObjectReader RESPONSE_READER = new ObjectMapper()
+      .readerFor(RemoteWasbAuthorizerResponse.class);
 
   /**
    * Configuration parameter name expected in the Configuration object to
@@ -176,7 +176,7 @@ public class RemoteWasbAuthorizerImpl implements WasbAuthorizerInterface {
       uriBuilder
           .addParameter(WASB_ABSOLUTE_PATH_QUERY_PARAM_NAME, wasbAbsolutePath);
       uriBuilder.addParameter(ACCESS_OPERATION_QUERY_PARAM_NAME, accessType);
-      if (StringUtils.isNotEmpty(resourceOwner)) {
+      if (resourceOwner != null && StringUtils.isNotEmpty(resourceOwner)) {
         uriBuilder.addParameter(WASB_RESOURCE_OWNER_QUERY_PARAM_NAME,
             resourceOwner);
       }

+ 5 - 3
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java

@@ -29,6 +29,9 @@ import java.util.Date;
 import java.util.Hashtable;
 import java.util.Map;
 
+import org.apache.hadoop.util.Preconditions;
+
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
 import org.slf4j.Logger;
@@ -39,8 +42,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.azurebfs.services.AbfsIoUtils;
 import org.apache.hadoop.fs.azurebfs.services.ExponentialRetryPolicy;
-import org.apache.hadoop.util.JacksonUtil;
-import org.apache.hadoop.util.Preconditions;
 
 /**
  * This class provides convenience methods to obtain AAD tokens.
@@ -492,7 +493,8 @@ public final class AzureADAuthenticator {
       int expiryPeriodInSecs = 0;
       long expiresOnInSecs = -1;
 
-      JsonParser jp = JacksonUtil.createBasicJsonFactory().createParser(httpResponseStream);
+      JsonFactory jf = new JsonFactory();
+      JsonParser jp = jf.createParser(httpResponseStream);
       String fieldName, fieldValue;
       jp.nextToken();
       while (jp.hasCurrentToken()) {
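A minimal sketch of the streaming parse this hunk sets up, assuming a flat JSON object like an AAD token response; the loop shape is simplified relative to the method's actual hasCurrentToken() loop, and the field name is an assumption:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

import java.io.IOException;
import java.io.InputStream;

public class TokenResponseSketch {
  public static String readAccessToken(InputStream httpResponseStream)
      throws IOException {
    String accessToken = null;
    try (JsonParser jp = new JsonFactory().createParser(httpResponseStream)) {
      jp.nextToken(); // consume START_OBJECT
      // Works for a flat object: FIELD_NAME tokens alternate with values.
      while (jp.nextToken() == JsonToken.FIELD_NAME) {
        String fieldName = jp.getCurrentName();
        jp.nextToken(); // advance to the field's value
        if ("access_token".equals(fieldName)) {
          accessToken = jp.getValueAsString();
        }
      }
    }
    return accessToken;
  }
}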

+ 4 - 3
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java

@@ -30,6 +30,7 @@ import java.util.Map;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,7 +40,6 @@ import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
 import org.apache.hadoop.fs.azurebfs.contracts.services.AbfsPerfLoggable;
 import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
 import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Base Http operation class for orchestrating server IO calls. Child classes would
@@ -447,7 +447,7 @@ public abstract class AbfsHttpOperation implements AbfsPerfLoggable {
       if (stream == null) {
         return;
       }
-      JsonFactory jf = JacksonUtil.createBasicJsonFactory();
+      JsonFactory jf = new JsonFactory();
       try (JsonParser jp = jf.createParser(stream)) {
         String fieldName, fieldValue;
         jp.nextToken();  // START_OBJECT - {
@@ -509,7 +509,8 @@ public abstract class AbfsHttpOperation implements AbfsPerfLoggable {
     }
 
     try {
-      this.listResultSchema = JacksonUtil.getSharedReader().readValue(stream,
+      final ObjectMapper objectMapper = new ObjectMapper();
+      this.listResultSchema = objectMapper.readValue(stream,
           ListResultSchema.class);
     } catch (IOException ex) {
       log.error("Unable to deserialize list results", ex);

+ 2 - 3
hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java

@@ -51,7 +51,6 @@ import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -485,7 +484,7 @@ public final class DynoInfraUtils {
       final int blockThreshold, final Logger log) throws IOException {
     final Set<String> dataNodesToReport = new HashSet<>();
 
-    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
+    JsonFactory fac = new JsonFactory();
     JsonParser parser = fac.createParser(IOUtils
         .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
 
@@ -555,7 +554,7 @@ public final class DynoInfraUtils {
           "Unable to retrieve JMX: " + conn.getResponseMessage());
     }
     InputStream in = conn.getInputStream();
-    JsonFactory fac = JacksonUtil.createBasicJsonFactory();
+    JsonFactory fac = new JsonFactory();
     JsonParser parser = fac.createParser(in);
     if (parser.nextToken() != JsonToken.START_OBJECT
         || parser.nextToken() != JsonToken.FIELD_NAME

+ 7 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Anonymizer.java

@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.OutputStream;
 
 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.Version;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -35,7 +36,6 @@ import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionCodecFactory;
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.mapreduce.ID;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.tools.rumen.datatypes.*;
@@ -55,7 +55,8 @@ public class Anonymizer extends Configured implements Tool {
   private StatePool statePool;
   
   private ObjectMapper outMapper = null;
-
+  private JsonFactory outFactory = null;
+  
   private void initialize(String[] args) throws Exception {
     try {
       for (int i = 0; i < args.length; ++i) {
@@ -84,7 +85,7 @@ public class Anonymizer extends Configured implements Tool {
     // initialize the state manager after the anonymizers are registered
     statePool.initialize(getConf());
      
-    outMapper = JacksonUtil.createBasicObjectMapper();
+    outMapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule(
         "Anonymization Serializer", new Version(0, 1, 1, "FINAL", "", ""));
@@ -103,6 +104,8 @@ public class Anonymizer extends Configured implements Tool {
     
     // register the module with the object-mapper
     outMapper.registerModule(module);
+    
+    outFactory = outMapper.getFactory();
   }
   
   // anonymize the job trace file
@@ -188,7 +191,7 @@ public class Anonymizer extends Configured implements Tool {
     }
 
     JsonGenerator outGen =
-        outMapper.createGenerator(output, JsonEncoding.UTF8);
+        outFactory.createGenerator(output, JsonEncoding.UTF8);
     outGen.useDefaultPrettyPrinter();
     
     return outGen;
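Worth noting for this file and for StatePool below: a generator obtained from mapper.getFactory() keeps the mapper attached as its codec, so writeObject() on the generator still goes through the mapper's registered modules (here, the anonymization serializers). A compact sketch of that property, with illustrative names:

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class CodecAwareGeneratorSketch {
  public static byte[] writeWithMapper(Object value) throws IOException {
    ObjectMapper mapper = new ObjectMapper(); // modules would be registered here
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen =
        mapper.getFactory().createGenerator(out, JsonEncoding.UTF8);
    gen.writeObject(value); // serialized via the mapper acting as codec
    gen.close();
    return out.toByteArray();
  }
}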

+ 4 - 5
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperParser.java

@@ -26,7 +26,6 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A simple wrapper for parsing JSON-encoded data using ObjectMapper.
@@ -49,10 +48,10 @@ class JsonObjectMapperParser<T> implements Closeable {
    */
   public JsonObjectMapperParser(Path path, Class<? extends T> clazz,
       Configuration conf) throws IOException {
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
     this.clazz = clazz;
     InputStream input = new PossiblyDecompressedInputStream(path, conf);
-    jsonParser = mapper.createParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
   }
 
   /**
@@ -63,9 +62,9 @@ class JsonObjectMapperParser<T> implements Closeable {
    */
   public JsonObjectMapperParser(InputStream input, Class<? extends T> clazz)
       throws IOException {
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
     this.clazz = clazz;
-    jsonParser = mapper.createParser(input);
+    jsonParser = mapper.getFactory().createParser(input);
   }
 
   /**

+ 2 - 3
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JsonObjectMapperWriter.java

@@ -30,7 +30,6 @@ import org.apache.hadoop.mapreduce.ID;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
 import org.apache.hadoop.tools.rumen.serializers.DefaultRumenSerializer;
 import org.apache.hadoop.tools.rumen.serializers.ObjectStringSerializer;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * Simple wrapper around {@link JsonGenerator} to write objects in JSON format.
@@ -40,7 +39,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
   private JsonGenerator writer;
   
   public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
 
     // define a module
     SimpleModule module = new SimpleModule(
@@ -54,7 +53,7 @@ public class JsonObjectMapperWriter<T> implements Closeable {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    writer = mapper.createGenerator(output, JsonEncoding.UTF8);
+    writer = mapper.getFactory().createGenerator(output, JsonEncoding.UTF8);
     if (prettyPrint) {
       writer.useDefaultPrettyPrinter();
     }

+ 6 - 5
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java

@@ -30,6 +30,7 @@ import java.util.HashMap;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.Version;
@@ -43,7 +44,6 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.Anonymizer;
 import org.apache.hadoop.tools.rumen.datatypes.DataType;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A pool of states. States used by {@link DataType}'s can be managed the 
@@ -206,7 +206,7 @@ public class StatePool {
   }
   
   private void read(DataInput in) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -216,7 +216,7 @@ public class StatePool {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    JsonParser parser = mapper.createParser((InputStream)in);
+    JsonParser parser = mapper.getFactory().createParser((InputStream)in);
     StatePool statePool = mapper.readValue(parser, StatePool.class);
     this.setStates(statePool.getStates());
     parser.close();
@@ -273,7 +273,7 @@ public class StatePool {
   private void write(DataOutput out) throws IOException {
     // This is just a JSON experiment
     System.out.println("Dumping the StatePool's in JSON format.");
-    ObjectMapper outMapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper outMapper = new ObjectMapper();
     // define a module
     SimpleModule module = new SimpleModule("State Serializer",  
         new Version(0, 1, 1, "FINAL", "", ""));
@@ -283,8 +283,9 @@ public class StatePool {
     // register the module with the object-mapper
     outMapper.registerModule(module);
 
+    JsonFactory outFactory = outMapper.getFactory();
     JsonGenerator jGen =
-        outMapper.createGenerator((OutputStream)out, JsonEncoding.UTF8);
+        outFactory.createGenerator((OutputStream)out, JsonEncoding.UTF8);
     jGen.useDefaultPrettyPrinter();
 
     jGen.writeObject(this);

+ 3 - 1
hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java

@@ -23,6 +23,7 @@ import java.io.OutputStream;
 import java.util.List;
 
 import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
@@ -140,8 +141,9 @@ public class TestHistograms {
         Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);
 
         ObjectMapper mapper = new ObjectMapper();
+        JsonFactory factory = mapper.getFactory();
         FSDataOutputStream ostream = lfs.create(goldFilePath, true);
-        JsonGenerator gen = mapper.createGenerator((OutputStream)ostream,
+        JsonGenerator gen = factory.createGenerator((OutputStream)ostream,
             JsonEncoding.UTF8);
         gen.useDefaultPrettyPrinter();
         

+ 7 - 3
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/AMRunner.java

@@ -16,13 +16,13 @@
 
 package org.apache.hadoop.yarn.sls;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.JavaType;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ReservationId;
@@ -44,8 +44,11 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -119,14 +122,15 @@ public class AMRunner {
    * Parse workload from a SLS trace file.
    */
   private void startAMFromSLSTrace(String inputTrace) throws IOException {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();
 
     try (Reader input = new InputStreamReader(
         new FileInputStream(inputTrace), StandardCharsets.UTF_8)) {
       JavaType type = mapper.getTypeFactory().
           constructMapType(Map.class, String.class, String.class);
       Iterator<Map<String, String>> jobIter = mapper.readValues(
-          mapper.createParser(input), type);
+          jsonF.createParser(input), type);
 
       while (jobIter.hasNext()) {
         try {
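For context, the MappingIterator pattern the SLS trace readers use, sketched end to end; the two-document trace string and its key are illustrative only:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;

public class TraceIteratorSketch {
  public static void main(String[] args) throws IOException {
    String trace = "{\"am.type\":\"mapreduce\"} {\"am.type\":\"stream\"}";
    ObjectMapper mapper = new ObjectMapper();
    JavaType type = mapper.getTypeFactory()
        .constructMapType(Map.class, String.class, String.class);
    // The parser comes from a bare JsonFactory; readValues() binds each
    // top-level JSON document through the mapper, one Map at a time.
    Iterator<Map<String, String>> jobIter = mapper.readValues(
        new JsonFactory().createParser(new StringReader(trace)), type);
    while (jobIter.hasNext()) {
      System.out.println(jobIter.next().get("am.type"));
    }
  }
}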

+ 4 - 4
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java

@@ -35,6 +35,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.commons.cli.CommandLine;
@@ -43,7 +44,6 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.sls.utils.SLSUtils;
 
 @Private
@@ -126,10 +126,10 @@ public class RumenToSLSConverter {
             StandardCharsets.UTF_8)) {
       try (Writer output =
           new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-        ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+        ObjectMapper mapper = new ObjectMapper();
         ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
-            mapper.createParser(input), Map.class);
+            new JsonFactory().createParser(input), Map.class);
         while (i.hasNext()) {
           Map m = i.next();
           output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
@@ -143,7 +143,7 @@ public class RumenToSLSConverter {
           throws IOException {
     try (Writer output =
         new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
-      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
         Map rack = new LinkedHashMap();

+ 1 - 3
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java

@@ -34,7 +34,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.tools.rumen.JobStory;
 import org.apache.hadoop.tools.rumen.JobStoryProducer;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
@@ -89,8 +88,7 @@ public class SynthTraceJobProducer implements JobStoryProducer {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-
-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     FileSystem ifs = path.getFileSystem(conf);
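Both sides of this hunk build the mapper over a pre-configured factory; only the construction helper differs. A sketch of the underlying Jackson pattern, with a hypothetical builder method:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonFactoryBuilder;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class TunedFactorySketch {
  public static ObjectMapper buildTraceMapper() {
    // Interning field names pays off when many trace records share keys.
    JsonFactory factory = new JsonFactoryBuilder()
        .configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true)
        .build();
    ObjectMapper mapper = new ObjectMapper(factory);
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    return mapper;
  }
}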

+ 7 - 5
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/utils/SLSUtils.java

@@ -34,6 +34,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -44,7 +45,6 @@ import org.apache.hadoop.tools.rumen.JobTraceReader;
 import org.apache.hadoop.tools.rumen.LoggedJob;
 import org.apache.hadoop.tools.rumen.LoggedTask;
 import org.apache.hadoop.tools.rumen.LoggedTaskAttempt;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.NodeLabel;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
@@ -120,11 +120,12 @@ public class SLSUtils {
   public static Set<NodeDetails> parseNodesFromSLSTrace(
       String jobTrace) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(jobTrace), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         addNodes(nodeSet, i.next());
       }
@@ -166,11 +167,12 @@ public class SLSUtils {
   public static Set<NodeDetails> parseNodesFromNodeFile(
       String nodeFile, Resource nmDefaultResource) throws IOException {
     Set<NodeDetails> nodeSet = new HashSet<>();
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    JsonFactory jsonF = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper();
     Reader input =
         new InputStreamReader(new FileInputStream(nodeFile), StandardCharsets.UTF_8);
     try {
-      Iterator<Map> i = mapper.readValues(mapper.createParser(input), Map.class);
+      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
       while (i.hasNext()) {
         Map jsonE = i.next();
         String rack = "/" + jsonE.get("rack");

+ 2 - 3
hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.sls;
 
 import org.apache.commons.math3.random.JDKRandomGenerator;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
@@ -61,7 +60,7 @@ public class TestSynthJobGeneration {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
     SynthTraceJobProducer.Workload wl =
         mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@@ -182,7 +181,7 @@ public class TestSynthJobGeneration {
 
     JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
     jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
-    ObjectMapper mapper = JacksonUtil.createObjectMapper(jsonFactoryBuilder.build());
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     JDKRandomGenerator rand = new JDKRandomGenerator();

+ 28 - 26
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/AppCatalogSolrClient.java

@@ -28,7 +28,6 @@ import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.appcatalog.model.AppStoreEntry;
 import org.apache.hadoop.yarn.appcatalog.model.Application;
@@ -58,18 +57,6 @@ public class AppCatalogSolrClient {
   private static final Logger LOG = LoggerFactory.getLogger(AppCatalogSolrClient.class);
   private static String urlString;
 
-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-  }
-
   public AppCatalogSolrClient() {
     // Locate Solr URL
     ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
@@ -159,6 +146,8 @@ public class AppCatalogSolrClient {
 
   public List<AppEntry> listAppEntries() {
     List<AppEntry> list = new ArrayList<AppEntry>();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -175,7 +164,7 @@ public class AppCatalogSolrClient {
         entry.setId(d.get("id").toString());
         entry.setName(d.get("name_s").toString());
         entry.setApp(d.get("app_s").toString());
-        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class));
         list.add(entry);
       }
@@ -187,6 +176,8 @@ public class AppCatalogSolrClient {
 
   public AppStoreEntry findAppStoreEntry(String id) {
     AppStoreEntry entry = new AppStoreEntry();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
@@ -206,7 +197,7 @@ public class AppCatalogSolrClient {
         entry.setDesc(d.get("desc_s").toString());
         entry.setLike(Integer.parseInt(d.get("like_i").toString()));
         entry.setDownload(Integer.parseInt(d.get("download_i").toString()));
-        Service yarnApp = OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        Service yarnApp = mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class);
         String name;
         try {
@@ -231,6 +222,9 @@ public class AppCatalogSolrClient {
 
   public AppEntry findAppEntry(String id) {
     AppEntry entry = new AppEntry();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
     SolrClient solr = getSolrClient();
     SolrQuery query = new SolrQuery();
     query.setQuery("id:" + id);
@@ -246,7 +240,7 @@ public class AppCatalogSolrClient {
         entry.setId(d.get("id").toString());
         entry.setApp(d.get("app_s").toString());
         entry.setName(d.get("name_s").toString());
-        entry.setYarnfile(OBJECT_MAPPER.readValue(d.get("yarnfile_s").toString(),
+        entry.setYarnfile(mapper.readValue(d.get("yarnfile_s").toString(),
             Service.class));
       }
     } catch (SolrServerException | IOException e) {
@@ -258,6 +252,8 @@ public class AppCatalogSolrClient {
   public void deployApp(String id, Service service) throws SolrServerException,
       IOException {
     long download = 0;
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     // Find application information from AppStore
@@ -291,7 +287,7 @@ public class AppCatalogSolrClient {
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", entry.getOrg()+"/"+entry.getName());
-      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
+      request.addField("yarnfile_s", mapper.writeValueAsString(service));
       docs.add(request);
     }
 
@@ -330,6 +326,8 @@ public class AppCatalogSolrClient {
   public void register(Application app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -345,10 +343,10 @@ public class AppCatalogSolrClient {
       buffer.setField("download_i", 0);
 
       // Keep only YARN data model for yarnfile field
-      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
-      LOG.info("app:{}", yarnFile);
-      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
+      String yarnFile = mapper.writeValueAsString(app);
+      LOG.info("app:"+yarnFile);
+      Service yarnApp = mapper.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
 
       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -361,6 +359,8 @@ public class AppCatalogSolrClient {
   protected void register(AppStoreEntry app) throws IOException {
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     try {
       SolrInputDocument buffer = new SolrInputDocument();
       buffer.setField("id", java.util.UUID.randomUUID().toString()
@@ -376,10 +376,10 @@ public class AppCatalogSolrClient {
       buffer.setField("download_i", app.getDownload());
 
       // Keep only YARN data model for yarnfile field
-      String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
-      LOG.info("app:{}", yarnFile);
-      Service yarnApp = OBJECT_MAPPER.readValue(yarnFile, Service.class);
-      buffer.setField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(yarnApp));
+      String yarnFile = mapper.writeValueAsString(app);
+      LOG.info("app:"+yarnFile);
+      Service yarnApp = mapper.readValue(yarnFile, Service.class);
+      buffer.setField("yarnfile_s", mapper.writeValueAsString(yarnApp));
 
       docs.add(buffer);
       commitSolrChanges(solr, docs);
@@ -391,6 +391,8 @@ public class AppCatalogSolrClient {
 
   public void upgradeApp(Service service) throws IOException,
       SolrServerException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     Collection<SolrInputDocument> docs = new HashSet<SolrInputDocument>();
     SolrClient solr = getSolrClient();
     if (service!=null) {
@@ -418,7 +420,7 @@ public class AppCatalogSolrClient {
       request.addField("id", name);
       request.addField("name_s", name);
       request.addField("app_s", app);
-      request.addField("yarnfile_s", OBJECT_MAPPER.writeValueAsString(service));
+      request.addField("yarnfile_s", mapper.writeValueAsString(service));
       docs.add(request);
     }
     try {
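Across this file the revert replaces one shared, statically configured mapper with a fresh instance in each method. That per-call shape, reduced to a sketch (the helper name is illustrative):

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public final class PerCallMapperSketch {
  public static <T> T parseLenient(String json, Class<T> type)
      throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    // Tolerate fields stored in Solr that the model class does not declare.
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    return mapper.readValue(json, type);
  }
}

Creating a mapper per call is correct but more expensive than reusing one; the static field being deleted here existed for exactly that reason, as its removed comment notes.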

+ 15 - 19
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-catalog/hadoop-yarn-applications-catalog-webapp/src/main/java/org/apache/hadoop/yarn/appcatalog/application/YarnServiceClient.java

@@ -23,7 +23,6 @@ import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.appcatalog.model.AppEntry;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ServiceState;
@@ -47,19 +46,6 @@ import org.slf4j.LoggerFactory;
 public class YarnServiceClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(YarnServiceClient.class);
-
-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-  }
-
   private static Configuration conf = new Configuration();
   private static ClientConfig getClientConfig() {
     ClientConfig config = new DefaultClientConfig();
@@ -80,6 +66,8 @@ public class YarnServiceClient {
   }
 
   public void createApp(Service app) {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     ClientResponse response;
     try {
       boolean useKerberos = UserGroupInformation.isSecurityEnabled();
@@ -102,7 +90,7 @@ public class YarnServiceClient {
         app.setKerberosPrincipal(kerberos);
       }
       response = asc.getApiClient().post(ClientResponse.class,
-          OBJECT_MAPPER.writeValueAsString(app));
+          mapper.writeValueAsString(app));
       if (response.getStatus() >= 299) {
         String message = response.getEntity(String.class);
         throw new RuntimeException("Failed : HTTP error code : "
@@ -131,8 +119,10 @@ public class YarnServiceClient {
   }
 
   public void restartApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -149,8 +139,10 @@ public class YarnServiceClient {
   }
 
   public void stopApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))
@@ -167,12 +159,14 @@ public class YarnServiceClient {
   }
 
   public void getStatus(AppEntry entry) {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = entry.getName();
     Service app = null;
     try {
       String yarnFile = asc.getApiClient(asc.getServicePath(appInstanceId))
           .get(String.class);
-      app = OBJECT_MAPPER.readValue(yarnFile, Service.class);
+      app = mapper.readValue(yarnFile, Service.class);
       entry.setYarnfile(app);
     } catch (UniformInterfaceException | IOException e) {
       LOG.error("Error in fetching application status: ", e);
@@ -180,9 +174,11 @@ public class YarnServiceClient {
   }
 
   public void upgradeApp(Service app) throws JsonProcessingException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     String appInstanceId = app.getName();
     app.setState(ServiceState.EXPRESS_UPGRADING);
-    String yarnFile = OBJECT_MAPPER.writeValueAsString(app);
+    String yarnFile = mapper.writeValueAsString(app);
     ClientResponse response;
     try {
       response = asc.getApiClient(asc.getServicePath(appInstanceId))

+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/instance/ComponentInstance.java

@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
@@ -876,7 +875,7 @@ public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
         doRegistryUpdate = false;
       }
     }
-    final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     try {
       Map<String, List<Map<String, String>>> ports = null;
       ports = mapper.readValue(status.getExposedPorts(),

+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java

@@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -62,10 +61,9 @@ public class JsonSerDeser<T> {
   @SuppressWarnings("deprecation")
   public JsonSerDeser(Class<T> classType) {
     this.classType = classType;
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false);
-    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
   }
 
   public JsonSerDeser(Class<T> classType, PropertyNamingStrategy namingStrategy) {
@@ -233,6 +231,7 @@ public class JsonSerDeser<T> {
    * @throws JsonProcessingException parse problems
    */
   public String toJson(T instance) throws JsonProcessingException {
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
     return mapper.writeValueAsString(instance);
   }
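After this revert, toJson() flips INDENT_OUTPUT on the shared instance mapper at call time. For comparison, a non-mutating way to get the same pretty output, sketched here rather than taken from the class:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public final class PrettyJsonSketch {
  // An ObjectWriter snapshot is immutable and thread-safe, so pretty
  // printing does not require reconfiguring the mapper itself.
  private static final ObjectWriter PRETTY_WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();

  public static String toJson(Object instance) throws JsonProcessingException {
    return PRETTY_WRITER.writeValueAsString(instance);
  }
}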
 

+ 3 - 14
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java

@@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
 
 import java.io.IOException;
@@ -42,18 +41,6 @@ import java.util.Properties;
 @JsonInclude(value = JsonInclude.Include.NON_NULL)
 public class PublishedConfiguration {
 
-  /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
-   */
-  private static final ObjectMapper OBJECT_MAPPER;
-
-  static {
-    OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
-    OBJECT_MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
-  }
-
   public String description;
   public long updated;
   
@@ -167,7 +154,9 @@ public class PublishedConfiguration {
    * @throws IOException marshalling failure
    */
   public String asJson() throws IOException {
-    String json = OBJECT_MAPPER.writeValueAsString(entries);
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
+    String json = mapper.writeValueAsString(entries);
     return json;
   }
 

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java

@@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -61,6 +60,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -274,7 +274,7 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   }
 
   private ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -365,8 +365,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
 
     protected void prepareForWrite() throws IOException{
       this.stream = createLogFileStream(fs, logPath);
-      this.jsonGenerator = JacksonUtil.getSharedWriter()
-          .createGenerator((OutputStream)stream);
+      this.jsonGenerator = new JsonFactory().createGenerator(
+          (OutputStream)stream);
       this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
       this.lastModifiedTime = Time.monotonicNow();
     }
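The MinimalPrettyPrinter("\n") in this hunk is what puts one timeline entity per line in the log file. A self-contained sketch of that framing, with made-up entity content:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;

import java.io.IOException;
import java.io.StringWriter;

public class LineDelimitedJsonSketch {
  public static void main(String[] args) throws IOException {
    StringWriter out = new StringWriter();
    JsonGenerator gen = new JsonFactory().createGenerator(out);
    // "\n" becomes the separator written between top-level values.
    gen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    for (String id : new String[] {"entity1", "entity2"}) {
      gen.writeStartObject();
      gen.writeStringField("entity", id); // illustrative field
      gen.writeEndObject();
    }
    gen.close();
    System.out.print(out); // {"entity":"entity1"}\n{"entity":"entity2"}
  }
}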

+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java

@@ -30,7 +30,6 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
-import org.apache.hadoop.util.JacksonUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -63,7 +62,7 @@ public class TimelineClientImpl extends TimelineClient {
 
   private static final Logger LOG =
       LoggerFactory.getLogger(TimelineClientImpl.class);
-  private static final ObjectMapper MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
   private static final String RESOURCE_URI_STR_V1 = "/ws/v1/timeline/";
 
   private static Options opts;

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java

@@ -27,9 +27,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;
 
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -96,8 +96,9 @@ public final class DockerClientConfigHandler {
     }
 
     // Parse the JSON and create the Tokens/Credentials.
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
-    JsonParser parser = mapper.createParser(contents);
+    ObjectMapper mapper = new ObjectMapper();
+    JsonFactory factory = mapper.getFactory();
+    JsonParser parser = factory.createParser(contents);
     JsonNode rootNode = mapper.readTree(parser);
 
     Credentials credentials = new Credentials();
@@ -160,7 +161,7 @@ public final class DockerClientConfigHandler {
       Credentials credentials) throws IOException {
     boolean foundDockerCred = false;
     if (credentials.numberOfTokens() > 0) {
-      ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       ObjectNode rootNode = mapper.createObjectNode();
       ObjectNode registryUrlNode = mapper.createObjectNode();
       for (Token<? extends TokenIdentifier> tk : credentials.getAllTokens()) {

+ 5 - 5
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java

@@ -31,7 +31,6 @@ import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
@@ -54,10 +53,11 @@ public class TimelineUtils {
       "TIMELINE_FLOW_RUN_ID_TAG";
   public final static String DEFAULT_FLOW_VERSION = "1";
 
-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static ObjectMapper mapper;
 
   static {
-    YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
+    mapper = new ObjectMapper();
+    YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
   }
 
   /**
@@ -90,9 +90,9 @@ public class TimelineUtils {
   public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
       throws JsonGenerationException, JsonMappingException, IOException {
     if (pretty) {
-      return OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(o);
+      return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
     } else {
-      return OBJECT_MAPPER.writeValueAsString(o);
+      return mapper.writeValueAsString(o);
     }
   }
 

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Controller.java

@@ -28,8 +28,8 @@ import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.webapp.view.DefaultPage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,6 +42,7 @@ import com.google.inject.servlet.RequestScoped;
 @InterfaceAudience.LimitedPrivate({"YARN", "MapReduce"})
 public abstract class Controller implements Params {
   public static final Logger LOG = LoggerFactory.getLogger(Controller.class);
+  static final ObjectMapper jsonMapper = new ObjectMapper();
 
   @RequestScoped
   public static class RequestContext{
@@ -224,7 +225,7 @@ public abstract class Controller implements Params {
     context().rendered = true;
     context().response.setContentType(MimeType.JSON);
     try {
-      JacksonUtil.getSharedWriter().writeValue(writer(), object);
+      jsonMapper.writeValue(writer(), object);
     } catch (Exception e) {
       throw new WebAppException(e);
     }

+ 9 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/GenericObjectMapper.java

@@ -19,11 +19,11 @@ package org.apache.hadoop.yarn.server.timeline;
 
 import java.io.IOException;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A utility class providing methods for serializing and deserializing
@@ -38,8 +38,14 @@ import org.apache.hadoop.util.JacksonUtil;
 public class GenericObjectMapper {
   private static final byte[] EMPTY_BYTES = new byte[0];
 
-  public static final ObjectReader OBJECT_READER = JacksonUtil.createBasicReaderFor(Object.class);
-  public static final ObjectWriter OBJECT_WRITER = JacksonUtil.getSharedWriter();
+  public static final ObjectReader OBJECT_READER;
+  public static final ObjectWriter OBJECT_WRITER;
+
+  static {
+    ObjectMapper mapper = new ObjectMapper();
+    OBJECT_READER = mapper.reader(Object.class);
+    OBJECT_WRITER = mapper.writer();
+  }
 
   /**
    * Serializes an Object into a byte array. Along with {@link #read(byte[])},
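One detail of the restored static block: mapper.reader(Class) is the form Jackson 2.x has since deprecated in favour of readerFor(Class); both yield an immutable, thread-safe ObjectReader bound to one target type. The equivalent non-deprecated shape, sketched:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

public class SharedCodecSketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();
  // readerFor(...) replaces the deprecated reader(Class) seen above.
  public static final ObjectReader OBJECT_READER =
      MAPPER.readerFor(Object.class);
  public static final ObjectWriter OBJECT_WRITER = MAPPER.writer();
}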

+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java

@@ -43,7 +43,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceFile;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.records.AuxServiceRecord;
@@ -136,7 +135,7 @@ public class AuxServices extends AbstractService
     this.dirsHandler = nmContext.getLocalDirsHandler();
     this.delService = deletionService;
     this.userUGI = getRemoteUgi();
-    this.mapper = JacksonUtil.createBasicObjectMapper();
+    this.mapper = new ObjectMapper();
     mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
     // Obtain services from configuration in init()
   }

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java

@@ -28,11 +28,11 @@ import java.util.regex.Pattern;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@@ -58,8 +58,9 @@ public class NetworkTagMappingJsonManager implements NetworkTagMappingManager {
           + " we have to set the configuration:" +
           YarnConfiguration.NM_NETWORK_TAG_MAPPING_FILE_PATH);
     }
+    ObjectMapper mapper = new ObjectMapper();
     try {
-      networkTagMapping = JacksonUtil.getSharedReader().readValue(new File(mappingJsonFile),
+      networkTagMapping = mapper.readValue(new File(mappingJsonFile),
           NetworkTagMapping.class);
     } catch (Exception e) {
       throw new YarnRuntimeException(e);

+ 1 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java

@@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -92,7 +91,6 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_LAYER_MOUNTS
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
 import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;
-
 /**
  * <p>This class is an extension of {@link OCIContainerRuntime} that uses the
  * native {@code container-executor} binary via a
@@ -208,7 +206,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
     imageTagToManifestPlugin.init(conf);
     manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
     manifestToResourcesPlugin.init(conf);
-    mapper = JacksonUtil.createBasicObjectMapper();
+    mapper = new ObjectMapper();
     defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);
 
     allowedNetworks.clear();

+ 5 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java

@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
 
 import java.io.BufferedReader;
@@ -43,6 +42,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,6 +65,7 @@ public class ImageTagToManifestPlugin extends AbstractService
     implements RuncImageTagToManifestPlugin {
 
   private Map<String, ImageManifest> manifestCache;
+  private ObjectMapper objMapper;
   private AtomicReference<Map<String, String>> localImageToHashCache =
       new AtomicReference<>(new HashMap<>());
   private AtomicReference<Map<String, String>> hdfsImageToHashCache =
@@ -106,7 +107,7 @@ public class ImageTagToManifestPlugin extends AbstractService
     }
 
     byte[] bytes = IOUtils.toByteArray(input);
-    manifest = JacksonUtil.getSharedReader().readValue(bytes, ImageManifest.class);
+    manifest = objMapper.readValue(bytes, ImageManifest.class);
 
     manifestCache.put(hash, manifest);
     return manifest;
@@ -278,6 +279,7 @@ public class ImageTagToManifestPlugin extends AbstractService
         DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR) + "/manifests/";
     int numManifestsToCache = conf.getInt(NM_RUNC_NUM_MANIFESTS_TO_CACHE,
         DEFAULT_NUM_MANIFESTS_TO_CACHE);
+    this.objMapper = new ObjectMapper();
     this.manifestCache = Collections.synchronizedMap(
         new LRUCache(numManifestsToCache, 0.75f));
 
@@ -313,7 +315,7 @@ public class ImageTagToManifestPlugin extends AbstractService
   }
 
   private static class LRUCache extends LinkedHashMap<String, ImageManifest> {
-    private final int cacheSize;
+    private int cacheSize;
 
     LRUCache(int initialCapacity, float loadFactor) {
       super(initialCapacity, loadFactor, true);
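
The `LRUCache` above relies on `LinkedHashMap`'s access-ordered mode (the `true` in the `super` call). A minimal sketch of that idiom, paired with the standard `removeEldestEntry` eviction hook that such a cache presumably uses (the override itself is not shown in this hunk):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Minimal LRU cache sketch built on LinkedHashMap's access-ordered mode.
public class LruSketch<K, V> extends LinkedHashMap<K, V> {
  private final int cacheSize;

  public LruSketch(int cacheSize) {
    // accessOrder=true moves entries to the tail on get(), so the head
    // is always the least recently used entry.
    super(cacheSize, 0.75f, true);
    this.cacheSize = cacheSize;
  }

  @Override
  protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
    // Evict the least recently used entry once capacity is exceeded.
    return size() > cacheSize;
  }
}
```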

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java

@@ -20,10 +20,10 @@ package org.apache.hadoop.yarn.server.resourcemanager.resource;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -105,7 +105,8 @@ public class ResourceProfilesManagerImpl implements ResourceProfilesManager {
         resourcesFile = tmp.getPath();
       }
     }
-    Map data = JacksonUtil.getSharedReader().readValue(new File(resourcesFile), Map.class);
+    ObjectMapper mapper = new ObjectMapper();
+    Map data = mapper.readValue(new File(resourcesFile), Map.class);
     Iterator iterator = data.entrySet().iterator();
     while (iterator.hasNext()) {
       Map.Entry entry = (Map.Entry) iterator.next();
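
The reverted code binds the profiles file to a raw `Map` and iterates with unchecked casts. For illustration only, the same parse with a `TypeReference` keeps the generics intact (the profile JSON below is a made-up shape, not the actual resource-profiles format):

```java
import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class ProfilesParseSketch {
  public static void main(String[] args) throws Exception {
    String json = "{\"minimum\":{\"memory-mb\":1024},\"default\":{\"memory-mb\":2048}}";
    ObjectMapper mapper = new ObjectMapper();
    // TypeReference preserves the generic type, avoiding the raw Map
    // and unchecked casts used in the hunk above.
    Map<String, Map<String, Long>> profiles = mapper.readValue(
        json, new TypeReference<Map<String, Map<String, Long>>>() { });
    for (Map.Entry<String, Map<String, Long>> e : profiles.entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue());
    }
  }
}
```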

+ 5 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/MappingRuleCreator.java

@@ -27,7 +27,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRule;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleAction;
 import org.apache.hadoop.yarn.server.resourcemanager.placement.csmappingrule.MappingRuleActions;
@@ -44,6 +43,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.placemen
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
 
 public class MappingRuleCreator {
@@ -58,12 +58,14 @@ public class MappingRuleCreator {
 
   MappingRulesDescription getMappingRulesFromJson(byte[] contents)
       throws IOException {
-    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
+    ObjectMapper objectMapper = new ObjectMapper();
+    return objectMapper.readValue(contents, MappingRulesDescription.class);
   }
 
   MappingRulesDescription getMappingRulesFromJson(String contents)
       throws IOException {
-    return JacksonUtil.getSharedReader().readValue(contents, MappingRulesDescription.class);
+    ObjectMapper objectMapper = new ObjectMapper();
+    return objectMapper.readValue(contents, MappingRulesDescription.class);
   }
 
   public List<MappingRule> getMappingRulesFromFile(String jsonPath)
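
Each call above now pays for a fresh `ObjectMapper`. Since a fully configured mapper is thread-safe for reads, a shared instance (essentially what `JacksonUtil.getSharedReader()` provided) is an equally valid shape; a sketch with a hypothetical `Description` type:

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;

public final class SharedMapperSketch {
  // An ObjectMapper is thread-safe once configured, so a shared instance
  // avoids the per-call construction cost seen in the hunk above.
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Hypothetical description type, for illustration only.
  public static class Description {
    public String name;
  }

  public static Description parse(byte[] contents) throws IOException {
    return MAPPER.readValue(contents, Description.class);
  }

  public static Description parse(String contents) throws IOException {
    return MAPPER.readValue(contents, Description.class);
  }
}
```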

+ 6 - 9
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/placement/converter/LegacyMappingRuleToJson.java

@@ -21,7 +21,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath;
 
@@ -53,11 +52,9 @@ public class LegacyMappingRuleToJson {
   public static final String JSON_NODE_MATCHES = "matches";
 
   /**
-   * It is more performant to reuse ObjectMapper instances but keeping the instance
-   * private makes it harder for someone to reconfigure it which might have unwanted
-   * side effects.
+   * Our internal object mapper, used to create JSON nodes.
    */
-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private ObjectMapper objectMapper = new ObjectMapper();
 
   /**
    * Collection to store the legacy group mapping rule strings.
@@ -141,8 +138,8 @@ public class LegacyMappingRuleToJson {
    */
   public String convert() {
     //creating the basic JSON config structure
-    ObjectNode rootNode = OBJECT_MAPPER.createObjectNode();
-    ArrayNode rulesNode = OBJECT_MAPPER.createArrayNode();
+    ObjectNode rootNode = objectMapper.createObjectNode();
+    ArrayNode rulesNode = objectMapper.createArrayNode();
     rootNode.set("rules", rulesNode);
 
     //Processing and adding all the user group mapping rules
@@ -161,7 +158,7 @@ public class LegacyMappingRuleToJson {
     }
 
     try {
-      return OBJECT_MAPPER
+      return objectMapper
           .writerWithDefaultPrettyPrinter()
           .writeValueAsString(rootNode);
     } catch (JsonProcessingException e) {
@@ -249,7 +246,7 @@ public class LegacyMappingRuleToJson {
    * @return The object node with the preset fields
    */
   private ObjectNode createDefaultRuleNode(String type) {
-    return OBJECT_MAPPER
+    return objectMapper
         .createObjectNode()
         .put("type", type)
         //All legacy rule fallback to place to default
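
The converter builds its output with Jackson's tree model. A compact sketch of the same flow, creating the `rules` array, one rule node, and pretty-printing the result (only the `type` field name is taken from the hunk; the other values are illustrative):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public final class TreeModelSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Build the { "rules": [ ... ] } skeleton the converter emits.
    ObjectNode root = mapper.createObjectNode();
    ArrayNode rules = root.putArray("rules");
    rules.addObject()
        .put("type", "user")
        .put("fallbackResult", "skip"); // illustrative value
    System.out.println(
        mapper.writerWithDefaultPrettyPrinter().writeValueAsString(root));
  }
}
```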

+ 3 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java

@@ -32,7 +32,6 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -56,6 +55,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.Dom
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -327,14 +327,14 @@ public class FSConfigToCSConfigConverter {
           placementConverter.convertPlacementPolicy(placementManager,
               ruleHandler, capacitySchedulerConfig, usePercentages);
 
-      final ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+      ObjectMapper mapper = new ObjectMapper();
       // close output stream if we write to a file, leave it open otherwise
       if (!consoleMode && rulesToFile) {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, true);
       } else {
         mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
       }
-      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+      ObjectWriter writer = mapper.writer(new DefaultPrettyPrinter());
 
       if (consoleMode && rulesToFile) {
         System.out.println("======= " + MAPPING_RULES_JSON + " =======");
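
`AUTO_CLOSE_TARGET` decides whether writing a value also closes the underlying stream, which is why the converter toggles it for console versus file output. A sketch of the difference, writing two documents to one stream with the feature disabled:

```java
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.Collections;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class AutoCloseTargetSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // false = writeValue() flushes but does not close the target stream,
    // the behaviour the converter picks for console output above.
    mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);

    File tmp = File.createTempFile("rules", ".json");
    tmp.deleteOnExit();
    try (OutputStream out = new FileOutputStream(tmp)) {
      mapper.writeValue(out, Collections.singletonMap("rule", 1));
      // The stream is still open, so further writes succeed; with
      // AUTO_CLOSE_TARGET=true the next line would throw IOException.
      out.write('\n');
      mapper.writeValue(out, Collections.singletonMap("rule", 2));
    }
  }
}
```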

+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java

@@ -42,7 +42,6 @@ import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.service.ServiceOperations;
 import org.apache.hadoop.ipc.CallerContext;
 import org.apache.hadoop.util.ApplicationClassLoader;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -321,7 +320,7 @@ public class EntityGroupFSTimelineStore extends CompositeService
       }
     }
 
-    objMapper = JacksonUtil.createBasicObjectMapper();
+    objMapper = new ObjectMapper();
     objMapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     jsonFactory = new MappingJsonFactory(objMapper);
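
The reverted setup wires JAXB annotation handling into a fresh mapper and derives the `JsonFactory` from it, so parsers and generators created by the factory share the mapper's configuration. A standalone sketch, assuming the `jackson-module-jaxb-annotations` artifact is on the classpath:

```java
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;

public final class JaxbMapperSketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    // Honour JAXB annotations (e.g. @XmlElement) during (de)serialization.
    mapper.setAnnotationIntrospector(
        new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
    // A factory bound to the mapper, as in the hunk above.
    MappingJsonFactory factory = new MappingJsonFactory(mapper);
    System.out.println(factory.getCodec() == mapper); // prints: true
  }
}
```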

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LevelDBCacheTimelineStore.java

@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.yarn.server.timeline;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.timeline.util.LeveldbUtils;
@@ -298,6 +298,7 @@ public class LevelDBCacheTimelineStore extends KeyValueBasedTimelineStore {
         }
       };
     }
+    static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
     @SuppressWarnings("unchecked")
     private V getEntityForKey(byte[] key) throws IOException {
@@ -305,7 +306,7 @@ public class LevelDBCacheTimelineStore extends KeyValueBasedTimelineStore {
       if (resultRaw == null) {
         return null;
       }
-      return (V) JacksonUtil.getSharedReader().readValue(resultRaw, TimelineEntity.class);
+      return (V) OBJECT_MAPPER.readValue(resultRaw, TimelineEntity.class);
     }
 
     private byte[] getStartTimeKey(K entityId) {

+ 5 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.timeline;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -30,7 +31,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
@@ -108,7 +108,7 @@ public class PluginStoreTestUtils {
   }
 
   static ObjectMapper createObjectMapper() {
-    ObjectMapper mapper = JacksonUtil.createBasicObjectMapper();
+    ObjectMapper mapper = new ObjectMapper();
     mapper.setAnnotationIntrospector(
         new JaxbAnnotationIntrospector(TypeFactory.defaultInstance()));
     mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
@@ -230,9 +230,10 @@ public class PluginStoreTestUtils {
   static void writeEntities(TimelineEntities entities, Path logPath,
       FileSystem fs) throws IOException {
     FSDataOutputStream outStream = createLogFile(logPath, fs);
-    ObjectMapper objMapper = createObjectMapper();
-    JsonGenerator jsonGenerator = objMapper.createGenerator((OutputStream)outStream);
+    JsonGenerator jsonGenerator
+        = new JsonFactory().createGenerator((OutputStream)outStream);
     jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+    ObjectMapper objMapper = createObjectMapper();
     for (TimelineEntity entity : entities.getEntities()) {
       objMapper.writeValue(jsonGenerator, entity);
     }
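
The test helper writes one entity per line by giving the generator a `MinimalPrettyPrinter` whose root separator is a newline. A sketch of that line-delimited output, using plain maps instead of timeline entities:

```java
import java.io.ByteArrayOutputStream;
import java.util.Collections;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class LineDelimitedJsonSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonGenerator gen = new JsonFactory().createGenerator(out);
    // MinimalPrettyPrinter("\n") separates root-level values with a
    // newline, producing one JSON document per line, as above.
    gen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    for (int i = 0; i < 3; i++) {
      mapper.writeValue(gen, Collections.singletonMap("entity", i));
    }
    gen.close();
    System.out.print(out.toString("UTF-8"));
  }
}
```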

+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java

@@ -23,7 +23,6 @@ import java.io.IOException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.hadoop.util.JacksonUtil;
 
 /**
  * A simple util class for Json SerDe.
@@ -32,7 +31,7 @@ public final class JsonUtils {
 
   private JsonUtils(){}
 
-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
   static {
     OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

+ 4 - 4
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java

@@ -46,7 +46,6 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.JacksonUtil;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@@ -105,10 +104,11 @@ public class FileSystemTimelineReaderImpl extends AbstractService
     return rootPath.toString();
   }
 
-  private static final ObjectMapper OBJECT_MAPPER = JacksonUtil.createBasicObjectMapper();
+  private static ObjectMapper mapper;
 
   static {
-    YarnJacksonJaxbJsonProvider.configObjectMapper(OBJECT_MAPPER);
+    mapper = new ObjectMapper();
+    YarnJacksonJaxbJsonProvider.configObjectMapper(mapper);
   }
 
   /**
@@ -127,7 +127,7 @@ public class FileSystemTimelineReaderImpl extends AbstractService
   public static <T> T getTimelineRecordFromJSON(
       String jsonString, Class<T> clazz)
       throws JsonGenerationException, JsonMappingException, IOException {
-    return OBJECT_MAPPER.readValue(jsonString, clazz);
+    return mapper.readValue(jsonString, clazz);
   }
 
   private static void fillFields(TimelineEntity finalEntity,
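
`getTimelineRecordFromJSON` above follows the usual shape for a generic JSON helper: one statically configured mapper behind a `Class<T>`-parameterised read method. A minimal sketch of the pattern:

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;

public final class RecordReaderSketch {
  // Shared, statically configured mapper; safe for concurrent reads.
  private static final ObjectMapper MAPPER = new ObjectMapper();

  public static <T> T fromJson(String json, Class<T> clazz)
      throws IOException {
    return MAPPER.readValue(json, clazz);
  }

  public static void main(String[] args) throws IOException {
    Integer n = fromJson("42", Integer.class);
    System.out.println(n); // prints: 42
  }
}
```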