Browse Source

HDFS-6020. Fix the five findbugs warnings. Contributed by Kihwal Lee.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-5535@1572165 13f79535-47bb-0310-9956-ffa450edef68
Kihwal Lee 11 years ago
parent
commit
c8182ea764

+ 1 - 0
hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt

@@ -104,3 +104,4 @@ HDFS-5535 subtasks:
     TestRollingUpgrade and TestRollingUpgradeRollback. (Haohui Mai via
     Arpit Agarwal)
 
+    HDFS-6020. Fix the five findbugs warnings. (kihwal)

+ 3 - 2
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

@@ -752,8 +752,9 @@ class BlockReceiver implements Closeable {
             File blockFile = ((ReplicaInPipeline)replicaInfo).getBlockFile();
             File restartMeta = new File(blockFile.getParent()  + 
                 File.pathSeparator + "." + blockFile.getName() + ".restart");
-            if (restartMeta.exists()) {
-              restartMeta.delete();
+            if (restartMeta.exists() && !restartMeta.delete()) {
+              LOG.warn("Failed to delete restart meta file: " +
+                  restartMeta.getPath());
             }
             try {
               FileWriter out = new FileWriter(restartMeta);

+ 12 - 4
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java

@@ -201,23 +201,28 @@ class BlockPoolSlice {
    */
   void saveDfsUsed() {
     File outFile = new File(currentDir, DU_CACHE_FILE);
-    if (outFile.exists()) {
-      outFile.delete();
+    if (outFile.exists() && !outFile.delete()) {
+      FsDatasetImpl.LOG.warn("Failed to delete old dfsUsed file in " +
+        outFile.getParent());
     }
 
+    FileWriter out = null;
     try {
       long used = getDfsUsed();
       if (used > 0) {
-        FileWriter out = new FileWriter(outFile);
+        out = new FileWriter(outFile);
         // mtime is written last, so that truncated writes won't be valid.
         out.write(Long.toString(used) + " " + Long.toString(Time.now()));
         out.flush();
         out.close();
+        out = null;
       }
     } catch (IOException ioe) {
       // If write failed, the volume might be bad. Since the cache file is
       // not critical, log the error and continue.
       FsDatasetImpl.LOG.warn("Failed to write dfsUsed to " + outFile, ioe);
+    } finally {
+      IOUtils.cleanup(null, out);
     }
   }
 
@@ -297,7 +302,10 @@ class BlockPoolSlice {
             loadRwr = false;
           }
           sc.close();
-          restartMeta.delete();
+          if (!restartMeta.delete()) {
+            FsDatasetImpl.LOG.warn("Failed to delete restart meta file: " +
+              restartMeta.getPath());
+          }
         } catch (FileNotFoundException fnfe) {
           // nothing to do here
         } finally {