
HADOOP-10104. Update jackson to 1.9.13 (Akira Ajisaka via stevel)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1585933 13f79535-47bb-0310-9956-ffa450edef68
Author: Steve Loughran
Parent commit: 770aef5053

+ 2 - 0
hadoop-common-project/hadoop-common/CHANGES.txt

@@ -19,6 +19,8 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10454. Provide FileContext version of har file system. (Kihwal Lee
     via jeagles)
 
+    HADOOP-10104. Update jackson to 1.9.13 (Akira Ajisaka via stevel)
+
   OPTIMIZATIONS
 
   BUG FIXES 

+ 5 - 5
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java

@@ -358,8 +358,8 @@ class ClusterJspHelper {
       nn.missingBlocksCount = getProperty(props, "NumberOfMissingBlocks")
           .getLongValue();
       nn.httpAddress = httpAddress.toURL();
-      getLiveNodeCount(getProperty(props, "LiveNodes").getValueAsText(), nn);
-      getDeadNodeCount(getProperty(props, "DeadNodes").getValueAsText(), nn);
+      getLiveNodeCount(getProperty(props, "LiveNodes").asText(), nn);
+      getDeadNodeCount(getProperty(props, "DeadNodes").asText(), nn);
       nn.softwareVersion = getProperty(props, "SoftwareVersion").getTextValue();
       return nn;
     }
@@ -373,11 +373,11 @@ class ClusterJspHelper {
         Map<String, Map<String, String>> statusMap, String props)
         throws IOException, MalformedObjectNameException {
       getLiveNodeStatus(statusMap, host, getProperty(props, "LiveNodes")
-          .getValueAsText());
+          .asText());
       getDeadNodeStatus(statusMap, host, getProperty(props, "DeadNodes")
-          .getValueAsText());
+          .asText());
       getDecommissionNodeStatus(statusMap, host,
-          getProperty(props, "DecomNodes").getValueAsText());
+          getProperty(props, "DecomNodes").asText());
     }
   
     /**

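Jackson 1.9 deprecated JsonNode.getValueAsText() in favor of asText(), which is what this hunk tracks. A minimal sketch of the 1.9 tree-model accessors used above, against org.codehaus.jackson (the JSON payload is made up for illustration):

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class TreeModelDemo {
  public static void main(String[] args) throws Exception {
    JsonNode props = new ObjectMapper().readTree(
        "{\"NumberOfMissingBlocks\": 3, \"SoftwareVersion\": \"2.5.0\"}");
    // getLongValue()/getTextValue() still exist in 1.9; asText() replaces
    // the now-deprecated getValueAsText().
    long missing = props.get("NumberOfMissingBlocks").getLongValue();
    String version = props.get("SoftwareVersion").asText();
    System.out.println(missing + " / " + version);
  }
}
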
+ 4 - 4
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestRMNMInfo.java

@@ -116,7 +116,7 @@ public class TestRMNMInfo {
       Assert.assertNotNull(n.get("HostName"));
       Assert.assertNotNull(n.get("Rack"));
       Assert.assertTrue("Node " + n.get("NodeId") + " should be RUNNING",
-              n.get("State").getValueAsText().contains("RUNNING"));
+              n.get("State").asText().contains("RUNNING"));
       Assert.assertNotNull(n.get("NodeHTTPAddress"));
       Assert.assertNotNull(n.get("LastHealthUpdate"));
       Assert.assertNotNull(n.get("HealthReport"));
@@ -124,10 +124,10 @@ public class TestRMNMInfo {
       Assert.assertNotNull(n.get("NumContainers"));
       Assert.assertEquals(
               n.get("NodeId") + ": Unexpected number of used containers",
-              0, n.get("NumContainers").getValueAsInt());
+              0, n.get("NumContainers").asInt());
       Assert.assertEquals(
               n.get("NodeId") + ": Unexpected amount of used memory",
-              0, n.get("UsedMemoryMB").getValueAsInt());
+              0, n.get("UsedMemoryMB").asInt());
       Assert.assertNotNull(n.get("AvailableMemoryMB"));
     }
   }
@@ -153,7 +153,7 @@ public class TestRMNMInfo {
       Assert.assertNotNull(n.get("HostName"));
       Assert.assertNotNull(n.get("Rack"));
       Assert.assertTrue("Node " + n.get("NodeId") + " should be RUNNING",
-              n.get("State").getValueAsText().contains("RUNNING"));
+              n.get("State").asText().contains("RUNNING"));
       Assert.assertNotNull(n.get("NodeHTTPAddress"));
       Assert.assertNotNull(n.get("LastHealthUpdate"));
       Assert.assertNotNull(n.get("HealthReport"));

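Likewise, asInt() is the 1.9 replacement for getValueAsInt(). Unlike getIntValue(), it coerces where it can (e.g. parsing numeric strings) and falls back to 0, which suits assertions against JMX output. A small sketch, with an invented payload:

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

public class AsIntDemo {
  public static void main(String[] args) throws Exception {
    JsonNode n = new ObjectMapper().readTree(
        "{\"NumContainers\": 2, \"UsedMemoryMB\": \"512\"}");
    System.out.println(n.get("NumContainers").asInt()); // 2
    // asInt() parses the numeric string; getIntValue() would return its
    // default of 0 here because the node is textual, not numeric.
    System.out.println(n.get("UsedMemoryMB").asInt());  // 512
  }
}
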
+ 4 - 4
hadoop-project/pom.xml

@@ -625,22 +625,22 @@
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-mapper-asl</artifactId>
-        <version>1.8.8</version>
+        <version>1.9.13</version>
       </dependency>
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-core-asl</artifactId>
-        <version>1.8.8</version>
+        <version>1.9.13</version>
       </dependency>
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-jaxrs</artifactId>
-        <version>1.8.8</version>
+        <version>1.9.13</version>
       </dependency>
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-xc</artifactId>
-        <version>1.8.8</version>
+        <version>1.9.13</version>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>

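The four org.codehaus.jackson artifacts are bumped together, which matters because jackson-mapper-asl links against jackson-core-asl internals and mismatched 1.x versions can fail at runtime. As a sanity check, a hypothetical snippet (assuming Jackson 1.6+, which added the Versioned interface) can print the version actually on the classpath:

import org.codehaus.jackson.map.ObjectMapper;

public class JacksonVersionCheck {
  public static void main(String[] args) {
    // ObjectMapper implements Versioned (Jackson 1.6+); after this bump
    // the reported version should read 1.9.13.
    System.out.println(new ObjectMapper().version());
  }
}
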
+ 1 - 1
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StateDeserializer.java

@@ -24,7 +24,7 @@ import org.codehaus.jackson.JsonParser;
 import org.codehaus.jackson.JsonProcessingException;
 import org.codehaus.jackson.map.DeserializationContext;
 import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.deser.StdDeserializer;
+import org.codehaus.jackson.map.deser.std.StdDeserializer;
 import org.codehaus.jackson.node.ObjectNode;
 
 /**

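Jackson 1.9 moved the standard deserializer base classes into the org.codehaus.jackson.map.deser.std subpackage, so only the import changes here. For illustration, a minimal subclass against the new location (the Point type is hypothetical, not from Rumen):

import java.io.IOException;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.DeserializationContext;
// Relocated in 1.9; lived in org.codehaus.jackson.map.deser in 1.8.
import org.codehaus.jackson.map.deser.std.StdDeserializer;

class Point {
  final int x, y;
  Point(int x, int y) { this.x = x; this.y = y; }
}

public class PointDeserializer extends StdDeserializer<Point> {
  public PointDeserializer() { super(Point.class); }

  @Override
  public Point deserialize(JsonParser jp, DeserializationContext ctxt)
      throws IOException, JsonProcessingException {
    // Read the value as a tree and pull out the two coordinates.
    JsonNode node = jp.readValueAsTree();
    return new Point(node.get("x").asInt(), node.get("y").asInt());
  }
}
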
+ 2 - 2
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/RumenToSLSConverter.java

@@ -120,7 +120,7 @@ public class RumenToSLSConverter {
       Writer output = new FileWriter(outputFile);
       try {
         ObjectMapper mapper = new ObjectMapper();
-        ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
+        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         Iterator<Map> i = mapper.readValues(
                 new JsonFactory().createJsonParser(input), Map.class);
         while (i.hasNext()) {
@@ -141,7 +141,7 @@ public class RumenToSLSConverter {
     Writer output = new FileWriter(outputFile);
     try {
       ObjectMapper mapper = new ObjectMapper();
-      ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
+      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
       for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
         Map rack = new LinkedHashMap();
         rack.put("rack", entry.getKey());

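ObjectMapper.defaultPrettyPrintingWriter() was deprecated in 1.9 and renamed to writerWithDefaultPrettyPrinter(); the output is unchanged. A small sketch of the replacement call, with made-up data:

import java.util.LinkedHashMap;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

public class PrettyPrintDemo {
  public static void main(String[] args) throws Exception {
    Map<String, Object> rack = new LinkedHashMap<String, Object>();
    rack.put("rack", "/default-rack");
    // writerWithDefaultPrettyPrinter() is the 1.9 name for the
    // deprecated defaultPrettyPrintingWriter().
    ObjectWriter writer = new ObjectMapper().writerWithDefaultPrettyPrinter();
    System.out.println(writer.writeValueAsString(rack));
  }
}
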
+ 2 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java

@@ -43,8 +43,7 @@ public class TimelineUtils {
     mapper = new ObjectMapper();
     AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
     mapper.setAnnotationIntrospector(introspector);
-    mapper.getSerializationConfig()
-        .setSerializationInclusion(Inclusion.NON_NULL);
+    mapper.setSerializationInclusion(Inclusion.NON_NULL);
   }
 
   /**
@@ -77,7 +76,7 @@ public class TimelineUtils {
   public static String dumpTimelineRecordtoJSON(Object o, boolean pretty)
       throws JsonGenerationException, JsonMappingException, IOException {
     if (pretty) {
-      return mapper.defaultPrettyPrintingWriter().writeValueAsString(o);
+      return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(o);
     } else {
       return mapper.writeValueAsString(o);
     }

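Jackson 1.9 moved SerializationConfig toward immutability and added setSerializationInclusion(...) on ObjectMapper itself, which the patch adopts here; the same one-line change recurs in YarnJacksonJaxbJsonProvider below. A hedged sketch of the NON_NULL effect (the Bean type is invented, not a YARN type):

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;

public class InclusionDemo {
  public static class Bean {
    public String id = "app_0001";
    public String diagnostics = null; // should be omitted from the JSON
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Added in Jackson 1.9; replaces mutating the SerializationConfig.
    mapper.setSerializationInclusion(Inclusion.NON_NULL);
    System.out.println(mapper.writeValueAsString(new Bean()));
    // {"id":"app_0001"} -- diagnostics dropped by NON_NULL
  }
}
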
+ 1 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.java

@@ -51,8 +51,7 @@ public class YarnJacksonJaxbJsonProvider extends JacksonJaxbJsonProvider {
     ObjectMapper mapper = super.locateMapper(type, mediaType);
     AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
     mapper.setAnnotationIntrospector(introspector);
-    mapper.getSerializationConfig()
-        .setSerializationInclusion(Inclusion.NON_NULL);
+    mapper.setSerializationInclusion(Inclusion.NON_NULL);
     return mapper;
   }
 }