HADOOP-16882. Update jackson-databind to 2.9.10.2 in branch-3.1, branch-2.10. Contributed by Lisheng Sun.

Wei-Chiu Chuang, 5 years ago
commit 58b025c8f4

+ 3 - 2
hadoop-project/pom.xml

@@ -69,7 +69,8 @@
 
     <!-- jackson versions -->
     <jackson.version>1.9.13</jackson.version>
-    <jackson2.version>2.7.8</jackson2.version>
+    <jackson2.version>2.9.10</jackson2.version>
+    <jackson2.databind.version>2.9.10.2</jackson2.databind.version>
 
     <!-- SLF4J version -->
     <slf4j.version>1.7.25</slf4j.version>
@@ -981,7 +982,7 @@
       <dependency>
         <groupId>com.fasterxml.jackson.core</groupId>
         <artifactId>jackson-databind</artifactId>
-        <version>${jackson2.version}</version>
+        <version>${jackson2.databind.version}</version>
       </dependency>
       <dependency>
         <groupId>com.fasterxml.jackson.core</groupId>

+ 4 - 4
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java

@@ -18,11 +18,11 @@
 package org.apache.hadoop.tools.rumen.state;
 
 import java.io.DataInput;
-import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
@@ -216,7 +216,7 @@ public class StatePool {
     // register the module with the object-mapper
     mapper.registerModule(module);
 
-    JsonParser parser = mapper.getFactory().createParser((DataInputStream)in);
+    JsonParser parser = mapper.getFactory().createParser((InputStream) in);
     StatePool statePool = mapper.readValue(parser, StatePool.class);
     this.setStates(statePool.getStates());
     parser.close();
@@ -285,7 +285,7 @@ public class StatePool {
 
     JsonFactory outFactory = outMapper.getFactory();
     JsonGenerator jGen =
-        outFactory.createGenerator((DataOutputStream)out, JsonEncoding.UTF8);
+        outFactory.createGenerator((OutputStream) out, JsonEncoding.UTF8);
     jGen.useDefaultPrettyPrinter();
 
     jGen.writeObject(this);
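
Note on the casts above (the same change recurs in the rumen test and timeline-service files below): Jackson 2.8 added DataInput/DataOutput overloads to JsonFactory (createParser(DataInput), createGenerator(DataOutput, JsonEncoding)) alongside the long-standing InputStream/OutputStream ones. StatePool's reload/persist methods receive a DataInput/DataOutput, and Hadoop's FSDataInputStream/FSDataOutputStream extend DataInputStream/DataOutputStream, so after the upgrade an uncast call (or the old cast to DataInputStream/DataOutputStream) matches both overloads and fails to compile as ambiguous. Casting to InputStream/OutputStream selects the stream-based overload the 2.7.8 code was already using. A minimal sketch of the effect, not part of the patch, with a plain DataOutputStream standing in for FSDataOutputStream:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class JacksonOverloadSketch {
  public static void main(String[] args) throws IOException {
    // Any DataOutputStream is both an OutputStream and a DataOutput,
    // just like Hadoop's FSDataOutputStream.
    DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
    JsonFactory factory = new JsonFactory();

    // Against jackson-core 2.9 the uncast call no longer compiles:
    //   "reference to createGenerator is ambiguous"
    // JsonGenerator gen = factory.createGenerator(out, JsonEncoding.UTF8);

    // The explicit cast picks the OutputStream overload, preserving the
    // behaviour the code had with jackson 2.7.8.
    JsonGenerator gen =
        factory.createGenerator((OutputStream) out, JsonEncoding.UTF8);
    gen.writeStartObject();
    gen.writeStringField("upgraded", "2.9.10");
    gen.writeEndObject();
    gen.close();
  }
}

Casting to the stream types rather than to DataInput/DataOutput keeps both parsing and generation on the same stream-backed code path the code used before the upgrade.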

+ 3 - 2
hadoop-tools/hadoop-rumen/src/test/java/org/apache/hadoop/tools/rumen/TestHistograms.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.tools.rumen;
 import java.io.IOException;
 
+import java.io.OutputStream;
 import java.util.List;
 
 import com.fasterxml.jackson.core.JsonEncoding;
@@ -142,8 +143,8 @@ public class TestHistograms {
         ObjectMapper mapper = new ObjectMapper();
         JsonFactory factory = mapper.getFactory();
         FSDataOutputStream ostream = lfs.create(goldFilePath, true);
-        JsonGenerator gen = factory.createGenerator(ostream,
-            JsonEncoding.UTF8);
+        JsonGenerator gen =
+            factory.createGenerator((OutputStream) ostream, JsonEncoding.UTF8);
         gen.useDefaultPrettyPrinter();
         
         gen.writeObject(newResult);

+ 3 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.client.api.impl;
 import java.io.Closeable;
 import java.io.Flushable;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -374,7 +375,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
 
     protected void prepareForWrite() throws IOException{
       this.stream = createLogFileStream(fs, logPath);
-      this.jsonGenerator = new JsonFactory().createGenerator(stream);
+      this.jsonGenerator =
+          new JsonFactory().createGenerator((OutputStream) stream);
       this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
       this.lastModifiedTime = Time.monotonicNow();
     }

+ 2 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/LogInfo.java

@@ -40,6 +40,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.util.ArrayList;
 
 abstract class LogInfo {
@@ -140,7 +141,7 @@ abstract class LogInfo {
     try {
       in.seek(offset);
       try {
-        parser = jsonFactory.createParser(in);
+        parser = jsonFactory.createParser((InputStream) in);
         parser.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false);
       } catch (IOException e) {
         // if app hasn't completed then there may be errors due to the

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/PluginStoreTestUtils.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -231,8 +232,8 @@ public class PluginStoreTestUtils {
   static void writeEntities(TimelineEntities entities, Path logPath,
       FileSystem fs) throws IOException {
     FSDataOutputStream outStream = createLogFile(logPath, fs);
-    JsonGenerator jsonGenerator
-        = new JsonFactory().createGenerator(outStream);
+    JsonGenerator jsonGenerator =
+        new JsonFactory().createGenerator((OutputStream) outStream);
     jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     ObjectMapper objMapper = createObjectMapper();
     for (TimelineEntity entity : entities.getEntities()) {

+ 5 - 3
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java

@@ -39,6 +39,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.io.OutputStream;
 import java.nio.charset.Charset;
 import java.util.EnumSet;
 
@@ -232,7 +233,8 @@ public class TestLogInfo {
       throws IOException {
     if (outStream == null) {
       outStream = PluginStoreTestUtils.createLogFile(logPath, fs);
-      jsonGenerator = new JsonFactory().createGenerator(outStream);
+      jsonGenerator =
+          new JsonFactory().createGenerator((OutputStream) outStream);
       jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     }
     for (TimelineEntity entity : entities.getEntities()) {
@@ -247,8 +249,8 @@ public class TestLogInfo {
       outStreamDomain = PluginStoreTestUtils.createLogFile(logPath, fs);
     }
     // Write domain uses its own json generator to isolate from entity writers
-    JsonGenerator jsonGeneratorLocal
-        = new JsonFactory().createGenerator(outStreamDomain);
+    JsonGenerator jsonGeneratorLocal =
+        new JsonFactory().createGenerator((OutputStream) outStreamDomain);
     jsonGeneratorLocal.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
     objMapper.writeValue(jsonGeneratorLocal, domain);
     outStreamDomain.hflush();