
HADOOP-18917. Upgrade to commons-io 2.14.0 (#6133) (#6151). Contributed by PJ Fanning.

PJ Fanning, 1 year ago
commit 09c4f50364

+ 1 - 1
LICENSE-binary

@@ -250,7 +250,7 @@ commons-cli:commons-cli:1.2
 commons-codec:commons-codec:1.11
 commons-collections:commons-collections:3.2.2
 commons-daemon:commons-daemon:1.0.13
-commons-io:commons-io:2.8.0
+commons-io:commons-io:2.14.0
 commons-logging:commons-logging:1.1.3
 commons-net:commons-net:3.9.0
 de.ruedigermoeller:fst:2.50

+ 6 - 11
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HostRestrictingAuthorizationFilter.java

@@ -117,17 +117,12 @@ public class HostRestrictingAuthorizationFilter implements Filter {
       String rulePath = rule.getPath();
       LOG.trace("Evaluating rule, subnet: {}, path: {}",
           subnet != null ? subnet.getCidrSignature() : "*", rulePath);
-      try {
-        if ((subnet == null || subnet.isInRange(remoteIp))
-            && FilenameUtils.directoryContains(rulePath, path)) {
-          LOG.debug("Found matching rule, subnet: {}, path: {}; returned true",
-              rule.getSubnet() != null ? subnet.getCidrSignature() : null,
-              rulePath);
-          return true;
-        }
-      } catch (IOException e) {
-        LOG.warn("Got IOException {}; returned false", e);
-        return false;
+      if ((subnet == null || subnet.isInRange(remoteIp))
+          && FilenameUtils.directoryContains(rulePath, path)) {
+        LOG.debug("Found matching rule, subnet: {}, path: {}; returned true",
+            rule.getSubnet() != null ? subnet.getCidrSignature() : null,
+            rulePath);
+        return true;
       }
     }
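
The try/catch could be dropped because recent commons-io releases removed the checked IOException from FilenameUtils.directoryContains(String, String); once the method stops declaring it, catching IOException there is a compile error. A minimal sketch against commons-io 2.14.0 (class name and paths are illustrative, not from the patch):

    // DirectoryContainsDemo.java -- illustrative only; assumes commons-io 2.14.0.
    import org.apache.commons.io.FilenameUtils;

    public class DirectoryContainsDemo {
      public static void main(String[] args) {
        // No try/catch needed: directoryContains(String, String) no longer
        // declares IOException in recent commons-io releases.
        boolean contained = FilenameUtils.directoryContains("/data", "/data/file.txt");
        System.out.println(contained); // true for Unix-style paths
      }
    }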
 

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java

@@ -257,7 +257,7 @@ public class TestSnapshot {
         FSImageTestUtil.getFSImage(
         cluster.getNameNode()).getStorage().getStorageDir(0));
     assertNotNull("Didn't generate or can't find fsimage", originalFsimage);
-    PrintStream o = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
+    PrintStream o = new PrintStream(NullOutputStream.INSTANCE);
     PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o);
     v.visit(new RandomAccessFile(originalFsimage, "r"));
   }
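
This hunk, and the identical one in TestOfflineImageViewer below, swaps the deprecated NULL_OUTPUT_STREAM constant for the NullOutputStream.INSTANCE singleton from commons-io's 2.12-era API cleanup. A minimal standalone sketch (class name is illustrative):

    // NullSinkDemo.java -- illustrative only; assumes commons-io 2.14.0.
    import java.io.PrintStream;
    import org.apache.commons.io.output.NullOutputStream;

    public class NullSinkDemo {
      public static void main(String[] args) {
        // INSTANCE replaces the deprecated NULL_OUTPUT_STREAM constant.
        PrintStream sink = new PrintStream(NullOutputStream.INSTANCE);
        sink.println("discarded"); // bytes are written nowhere
      }
    }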

+ 1 - 1
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java

@@ -391,7 +391,7 @@ public class TestOfflineImageViewer {
   @Test(expected = IOException.class)
   public void testTruncatedFSImage() throws IOException {
     File truncatedFile = new File(tempDir, "truncatedFsImage");
-    PrintStream output = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
+    PrintStream output = new PrintStream(NullOutputStream.INSTANCE);
     copyPartOfFile(originalFsimage, truncatedFile);
     try (RandomAccessFile r = new RandomAccessFile(truncatedFile, "r")) {
       new FileDistributionCalculator(new Configuration(), 0, 0, false, output)

+ 1 - 1
hadoop-project/pom.xml

@@ -127,7 +127,7 @@
     <commons-collections.version>3.2.2</commons-collections.version>
     <commons-compress.version>1.24.0</commons-compress.version>
     <commons-csv.version>1.9.0</commons-csv.version>
-    <commons-io.version>2.8.0</commons-io.version>
+    <commons-io.version>2.14.0</commons-io.version>
     <commons-lang3.version>3.12.0</commons-lang3.version>
     <commons-logging.version>1.1.3</commons-logging.version>
     <commons-logging-api.version>1.1</commons-logging-api.version>

+ 5 - 1
hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java

@@ -54,6 +54,7 @@ import org.apache.hadoop.yarn.util.ConverterUtils;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -505,7 +506,10 @@ public class HadoopArchiveLogs implements Tool {
     String classpath = halrJarPath + File.pathSeparator + harJarPath;
     FileWriterWithEncoding fw = null;
     try {
-      fw = new FileWriterWithEncoding(localScript, "UTF-8");
+      fw = FileWriterWithEncoding.builder()
+              .setFile(localScript)
+              .setCharset(StandardCharsets.UTF_8)
+              .get();
       fw.write("#!/bin/bash\nset -e\nset -x\n");
       int containerCount = 1;
       for (AppInfo context : eligibleApplications) {
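
commons-io 2.12+ deprecates the FileWriterWithEncoding(File, String) constructor in favor of the builder used in the hunk. A self-contained sketch of the same pattern (class name and script path are hypothetical):

    // ScriptWriterDemo.java -- illustrative only; assumes commons-io 2.14.0.
    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.output.FileWriterWithEncoding;

    public class ScriptWriterDemo {
      public static void main(String[] args) throws IOException {
        File script = new File("archive-logs.sh"); // hypothetical path
        try (FileWriterWithEncoding fw = FileWriterWithEncoding.builder()
            .setFile(script)
            .setCharset(StandardCharsets.UTF_8) // a Charset, not the "UTF-8" string
            .get()) {
          fw.write("#!/bin/bash\nset -e\n");
        }
      }
    }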

+ 3 - 3
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestTextFileBasedIdentityHandler.java

@@ -19,9 +19,9 @@
 package org.apache.hadoop.fs.azurebfs.services;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.charset.Charset;
+import java.nio.file.NoSuchFileException;
 
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -114,7 +114,7 @@ public class TestTextFileBasedIdentityHandler {
   public void testLookupForUserFileNotFound() throws Exception {
     TextFileBasedIdentityHandler handler =
         new TextFileBasedIdentityHandler(userMappingFile.getPath() + ".test", groupMappingFile.getPath());
-    intercept(FileNotFoundException.class, "FileNotFoundException",
+    intercept(NoSuchFileException.class, "NoSuchFileException",
         () -> handler.lookupForLocalUserIdentity(testUserDataLine3.split(":")[0]));
   }
 
@@ -143,7 +143,7 @@ public class TestTextFileBasedIdentityHandler {
   public void testLookupForGroupFileNotFound() throws Exception {
     TextFileBasedIdentityHandler handler =
         new TextFileBasedIdentityHandler(userMappingFile.getPath(), groupMappingFile.getPath() + ".test");
-    intercept(FileNotFoundException.class, "FileNotFoundException",
+    intercept(NoSuchFileException.class, "NoSuchFileException",
         () -> handler.lookupForLocalGroupIdentity(testGroupDataLine2.split(":")[0]));
   }
 }
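
The expected exception changes because commons-io's internal file opening moved to java.nio.file in recent releases, so a missing file now surfaces as NoSuchFileException rather than FileNotFoundException. Both are IOExceptions, but neither subclasses the other, which is why a test pinned to the exact type has to follow. A small sketch of the hierarchy point (class name is illustrative):

    // ExceptionHierarchyDemo.java -- illustrative only.
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.nio.file.NoSuchFileException;

    public class ExceptionHierarchyDemo {
      public static void main(String[] args) {
        // Both are IOExceptions...
        System.out.println(IOException.class.isAssignableFrom(NoSuchFileException.class));   // true
        System.out.println(IOException.class.isAssignableFrom(FileNotFoundException.class)); // true
        // ...but NoSuchFileException is not a FileNotFoundException.
        System.out.println(FileNotFoundException.class.isAssignableFrom(NoSuchFileException.class)); // false
      }
    }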

+ 5 - 1
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java

@@ -32,6 +32,7 @@ import java.io.OutputStream;
 import java.io.PrintStream;
 import java.io.Writer;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -783,7 +784,10 @@ public class AggregatedLogFormat {
       OutputStream os = null;
       PrintStream ps = null;
       try {
-        os = new WriterOutputStream(writer, Charset.forName("UTF-8"));
+        os = WriterOutputStream.builder()
+                .setWriter(writer)
+                .setCharset(StandardCharsets.UTF_8)
+                .get();
         ps = new PrintStream(os);
         while (true) {
           try {
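
Same builder migration as in HadoopArchiveLogs above: the WriterOutputStream(Writer, Charset) constructor is deprecated in commons-io 2.12+, and Charset.forName("UTF-8") gives way to the StandardCharsets constant. A self-contained sketch (class name is illustrative):

    // WriterOutputStreamDemo.java -- illustrative only; assumes commons-io 2.14.0.
    import java.io.OutputStream;
    import java.io.StringWriter;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.output.WriterOutputStream;

    public class WriterOutputStreamDemo {
      public static void main(String[] args) throws Exception {
        StringWriter writer = new StringWriter();
        try (OutputStream os = WriterOutputStream.builder()
            .setWriter(writer)
            .setCharset(StandardCharsets.UTF_8)
            .get()) {
          os.write("hello".getBytes(StandardCharsets.UTF_8));
        }
        System.out.println(writer); // hello (flushed on close)
      }
    }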

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java

@@ -495,8 +495,9 @@ public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
       store.init(conf);
       Mockito.verify(factory, Mockito.times(1))
           .repair(Mockito.any(File.class), Mockito.any(Options.class));
-      FileFilter fileFilter = new WildcardFileFilter(
-          "*" + LeveldbTimelineStore.BACKUP_EXT +"*");
+      FileFilter fileFilter = WildcardFileFilter.builder()
+              .setWildcards("*" + LeveldbTimelineStore.BACKUP_EXT +"*")
+              .get();
       Assert.assertTrue(path.listFiles(fileFilter).length > 0);
     } finally {
       store.close();

+ 3 - 2
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestRollingLevelDBTimelineStore.java

@@ -443,8 +443,9 @@ public class TestRollingLevelDBTimelineStore extends TimelineStoreTestUtils {
       store.init(conf);
       Mockito.verify(factory, Mockito.times(1))
           .repair(Mockito.any(File.class), Mockito.any(Options.class));
-      FilenameFilter fileFilter =
-          new WildcardFileFilter("*" + RollingLevelDBTimelineStore.BACKUP_EXT + "*");
+      FilenameFilter fileFilter = WildcardFileFilter.builder()
+              .setWildcards("*" + RollingLevelDBTimelineStore.BACKUP_EXT + "*")
+              .get();
       Assert.assertTrue(new File(path.getAbsolutePath(), RollingLevelDBTimelineStore.FILENAME)
           .list(fileFilter).length > 0);
     } finally {
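
Both Leveldb timeline-store tests make the same change: the deprecated WildcardFileFilter(String) constructor becomes a builder call. WildcardFileFilter implements both java.io.FileFilter and java.io.FilenameFilter, which is why the two hunks can assign the built filter to either type. A minimal sketch (class name, pattern, and directory are hypothetical):

    // WildcardFilterDemo.java -- illustrative only; assumes commons-io 2.14.0.
    import java.io.File;
    import java.io.FileFilter;
    import org.apache.commons.io.filefilter.WildcardFileFilter;

    public class WildcardFilterDemo {
      public static void main(String[] args) throws Exception {
        FileFilter filter = WildcardFileFilter.builder()
            .setWildcards("*.backup*") // hypothetical pattern
            .get();
        File[] matches = new File(".").listFiles(filter);
        System.out.println(matches == null ? 0 : matches.length);
      }
    }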