
HADOOP-1946. The Datanode code does not need to invoke du on
every heartbeat. (Hairong Kuang via dhruba)



git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@581028 13f79535-47bb-0310-9956-ffa450edef68

Dhruba Borthakur 18 years ago
parent
commit
f2da2498b6
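
The change factors the refresh-interval bookkeeping that DF and DU each carried into a shared ShellCommand base class (full diff below): run() re-executes the external command only when the cached result is older than the configured interval, so a Datanode heartbeat that asks for disk usage normally gets the cached value instead of forking du. A minimal standalone sketch of that throttling pattern; the class and method names here are illustrative, not the Hadoop API:

import java.io.IOException;

// Illustrative sketch: re-run an expensive external command only when
// the cached result is older than a configured interval.
abstract class ThrottledCommand {
  private final long intervalMs; // minimum time between real executions
  private long lastRunMs;        // when the command last actually ran

  ThrottledCommand(long intervalMs) {
    this.intervalMs = intervalMs;
    // A negative interval disables caching (every call re-runs the
    // command); otherwise prime lastRunMs so the first call executes.
    this.lastRunMs = (intervalMs < 0) ? 0 : -intervalMs;
  }

  protected void run() throws IOException {
    if (lastRunMs + intervalMs > System.currentTimeMillis()) {
      return; // cached result is still fresh; skip the fork/exec
    }
    try {
      runCommand(); // fork and parse the real command, e.g. du or df
    } finally {
      lastRunMs = System.currentTimeMillis();
    }
  }

  protected abstract void runCommand() throws IOException;
}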

+ 3 - 0
CHANGES.txt

@@ -94,6 +94,9 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    HADOOP-1946.  The Datanode code does not need to invoke du on
+    every heartbeat.  (Hairong Kuang via dhruba)
+
     HADOOP-1935. Fix a NullPointerException in internalReleaseCreate.
     (Dhruba Borthakur)
 

+ 0 - 92
src/java/org/apache/hadoop/fs/Command.java

@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
-
-/** A base class for running a unix command like du or df*/
-abstract public class Command {
-  /** Run a command */
-  protected void run() throws IOException { 
-    Process process;
-    process = Runtime.getRuntime().exec(getExecString());
-
-    try {
-      if (process.waitFor() != 0) {
-        throw new IOException
-          (new BufferedReader(new InputStreamReader(process.getErrorStream()))
-           .readLine());
-      }
-      parseExecResult(new BufferedReader(
-          new InputStreamReader(process.getInputStream())));
-    } catch (InterruptedException e) {
-      throw new IOException(e.toString());
-    } finally {
-      process.destroy();
-    }
-  }
-
-  /** return an array comtaining the command name & its parameters */ 
-  protected abstract String[] getExecString();
-  
-  /** Parse the execution result */
-  protected abstract void parseExecResult(BufferedReader lines)
-  throws IOException;
-
-  /// A simple implementation of Command
-  private static class SimpleCommandExecutor extends Command {
-    
-    private String[] command;
-    private StringBuffer reply;
-    
-    SimpleCommandExecutor(String[] execString) {
-      command = execString;
-    }
-
-    @Override
-    protected String[] getExecString() {
-      return command;
-    }
-
-    @Override
-    protected void parseExecResult(BufferedReader lines) throws IOException {
-      reply = new StringBuffer();
-      char[] buf = new char[512];
-      int nRead;
-      while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
-        reply.append(buf, 0, nRead);
-      }
-    }
-    
-    String getReply() {
-      return (reply == null) ? "" : reply.toString();
-    }
-  }
-  
-  /** 
-   * Static method to execute a command. Covers most of the simple cases 
-   * without requiring the user to implement Command interface.
-   */
-  public static String execCommand(String[] cmd) throws IOException {
-    SimpleCommandExecutor exec = new SimpleCommandExecutor(cmd);
-    exec.run();
-    return exec.getReply();
-  }
-}

+ 8 - 21
src/java/org/apache/hadoop/fs/DF.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.fs;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.io.BufferedReader;
 
 import java.util.StringTokenizer;
@@ -28,13 +27,10 @@ import org.apache.hadoop.conf.Configuration;
 
 /** Filesystem disk space usage statistics.  Uses the unix 'df' program.
  * Tested on Linux, FreeBSD, Cygwin. */
-public class DF extends Command {
+public class DF extends ShellCommand {
   public static final long DF_INTERVAL_DEFAULT = 3 * 1000; // default DF refresh interval 
   
   private String  dirPath;
-  private long    dfInterval;	// DF refresh interval in msec
-  private long    lastDF;		// last time doDF() was performed
-  
   private String filesystem;
   private long capacity;
   private long used;
@@ -47,18 +43,10 @@ public class DF extends Command {
   }
 
   public DF(File path, long dfInterval) throws IOException {
+    super(dfInterval);
     this.dirPath = path.getCanonicalPath();
-    this.dfInterval = dfInterval;
-    lastDF = (dfInterval < 0) ? 0 : -dfInterval;
-    this.doDF();
   }
   
-  private void doDF() throws IOException { 
-    if (lastDF + dfInterval > System.currentTimeMillis())
-      return;
-    super.run();
-  }
-
   /// ACCESSORS
 
   public String getDirPath() {
@@ -66,32 +54,32 @@ public class DF extends Command {
   }
   
   public String getFilesystem() throws IOException { 
-    doDF(); 
+    run(); 
     return filesystem; 
   }
   
   public long getCapacity() throws IOException { 
-    doDF(); 
+    run(); 
     return capacity; 
   }
   
   public long getUsed() throws IOException { 
-    doDF(); 
+    run(); 
     return used;
   }
   
   public long getAvailable() throws IOException { 
-    doDF(); 
+    run(); 
     return available;
   }
   
   public int getPercentUsed() throws IOException {
-    doDF();
+    run();
     return percentUsed;
   }
 
   public String getMount() throws IOException {
-    doDF();
+    run();
     return mount;
   }
   
@@ -133,7 +121,6 @@ public class DF extends Command {
     this.available = Long.parseLong(tokens.nextToken()) * 1024;
     this.percentUsed = Integer.parseInt(tokens.nextToken());
     this.mount = tokens.nextToken();
-    this.lastDF = System.currentTimeMillis();
   }
 
   public static void main(String[] args) throws Exception {
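
Note that every DF accessor now funnels through the inherited run(), so within one refresh interval a sequence of different getters still forks df at most once. A hedged usage sketch, assuming a Unix-like system with df on the PATH:

import java.io.File;
import org.apache.hadoop.fs.DF;

public class DFExample {
  public static void main(String[] args) throws Exception {
    // Use the default 3-second refresh interval from the diff above.
    DF df = new DF(new File("/tmp"), DF.DF_INTERVAL_DEFAULT);
    long capacity = df.getCapacity();   // first accessor forks df
    long available = df.getAvailable(); // served from the same cached run
    System.out.println(capacity + " " + available);
  }
}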

+ 3 - 9
src/java/org/apache/hadoop/fs/DU.java

@@ -25,17 +25,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.FSConstants;
 
 /** Filesystem disk space usage statistics.  Uses the unix 'du' program*/
-public class DU extends Command {
+public class DU extends ShellCommand {
   private String  dirPath;
-  private long    duInterval; // DU refresh interval in msec
-  private long    lastDU;   // last time doDU() was performed
 
   private long used;
   
   public DU(File path, long interval) throws IOException {
+    super(interval);
     this.dirPath = path.getCanonicalPath();
-    this.duInterval = interval;
-    run();
   }
   
   public DU(File path, Configuration conf) throws IOException {
@@ -52,9 +49,7 @@ public class DU extends Command {
   }
   
   synchronized public long getUsed() throws IOException { 
-    if (lastDU + duInterval > System.currentTimeMillis()) {
-      run();
-    }
+    run();
     return used;
   }
 
@@ -83,7 +78,6 @@ public class DU extends Command {
       throw new IOException("Illegal du output");
     }
     this.used = Long.parseLong(tokens[0])*1024;
-    this.lastDU = System.currentTimeMillis();
   }
 
   public static void main(String[] args) throws Exception {
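
With the interval handling moved into the base class, a caller such as the Datanode heartbeat path can invoke getUsed() as often as it likes; du is forked at most once per interval. A hedged usage sketch, assuming a Unix-like system with du on the PATH:

import java.io.File;
import org.apache.hadoop.fs.DU;

public class DUExample {
  public static void main(String[] args) throws Exception {
    // Refresh at most every 10 minutes (the interval is in milliseconds).
    DU du = new DU(new File("/tmp"), 10 * 60 * 1000L);
    long first = du.getUsed();  // first call actually runs du
    long again = du.getUsed();  // within the interval: cached, no fork
    System.out.println(first + " " + again);
  }
}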

+ 1 - 1
src/java/org/apache/hadoop/fs/FileUtil.java

@@ -257,7 +257,7 @@ public class FileUtil {
   /**
    * This class is only used on windows to invoke the cygpath command.
    */
-  private static class CygPathCommand extends Command {
+  private static class CygPathCommand extends ShellCommand {
     String[] command;
     String result;
     CygPathCommand(String path) throws IOException {

+ 112 - 0
src/java/org/apache/hadoop/fs/ShellCommand.java

@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+
+/** A base class for running a unix command like du or df. */
+abstract public class ShellCommand {
+  private long    interval;   // refresh interval in msec
+  private long    lastTime;   // last time the command was performed
+  
+  ShellCommand() {
+    this(0L);
+  }
+  
+  ShellCommand( long interval ) {
+    this.interval = interval;
+    this.lastTime = (interval<0) ? 0 : -interval;
+  }
+  
+  /** Check whether the command needs to be executed, and run it if so. */
+  protected void run() throws IOException {
+    if (lastTime + interval > System.currentTimeMillis())
+      return;
+    runCommand();
+  }
+
+  /** Run a command */
+  private void runCommand() throws IOException { 
+    Process process;
+    process = Runtime.getRuntime().exec(getExecString());
+
+    try {
+      if (process.waitFor() != 0) {
+        throw new IOException
+          (new BufferedReader(new InputStreamReader(process.getErrorStream()))
+           .readLine());
+      }
+      parseExecResult(new BufferedReader(
+          new InputStreamReader(process.getInputStream())));
+    } catch (InterruptedException e) {
+      throw new IOException(e.toString());
+    } finally {
+      process.destroy();
+      lastTime = System.currentTimeMillis();
+    }
+  }
+
+  /** Return an array containing the command name and its parameters. */ 
+  protected abstract String[] getExecString();
+  
+  /** Parse the execution result */
+  protected abstract void parseExecResult(BufferedReader lines)
+  throws IOException;
+
+  /// A simple implementation of ShellCommand
+  private static class SimpleCommandExecutor extends ShellCommand {
+    
+    private String[] command;
+    private StringBuffer reply;
+    
+    SimpleCommandExecutor(String[] execString) {
+      command = execString;
+    }
+
+    @Override
+    protected String[] getExecString() {
+      return command;
+    }
+
+    @Override
+    protected void parseExecResult(BufferedReader lines) throws IOException {
+      reply = new StringBuffer();
+      char[] buf = new char[512];
+      int nRead;
+      while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
+        reply.append(buf, 0, nRead);
+      }
+    }
+    
+    String getReply() {
+      return (reply == null) ? "" : reply.toString();
+    }
+  }
+  
+  /** 
+   * Static method to execute a command. Covers most of the simple cases 
+   * without requiring the user to subclass ShellCommand.
+   */
+  public static String execCommand(String[] cmd) throws IOException {
+    SimpleCommandExecutor exec = new SimpleCommandExecutor(cmd);
+    exec.run();
+    return exec.getReply();
+  }
+}
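
For one-shot callers the static execCommand helper keeps the old behaviour, as the TestDFSUpgradeFromImage hunk below shows: it runs the command once and returns its stdout. A minimal usage sketch, assuming bash is available:

import org.apache.hadoop.fs.ShellCommand;

public class ExecExample {
  public static void main(String[] args) throws Exception {
    String[] cmd = { "bash", "-c", "echo hello" };
    // Runs the command and returns its stdout; on a non-zero exit code
    // execCommand throws an IOException carrying the first stderr line.
    String out = ShellCommand.execCommand(cmd);
    System.out.print(out);
  }
}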

+ 2 - 2
src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java

@@ -27,7 +27,7 @@ import java.util.TreeMap;
 import java.util.zip.CRC32;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Command;
+import org.apache.hadoop.fs.ShellCommand;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
@@ -72,7 +72,7 @@ public class TestDFSUpgradeFromImage extends TestCase {
                  FileUtil.makeShellPath(dataDir) + "' ; tar -xf -)";
     LOG.info("Unpacking the tar file. Cmd : " + cmd);
     String[] shellCmd = { "bash", "-c", cmd };
-    Command.execCommand(shellCmd);
+    ShellCommand.execCommand(shellCmd);
     
     //Now read the reference info
     

+ 77 - 0
src/test/org/apache/hadoop/fs/TestDU.java

@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+import junit.framework.TestCase;
+
+/** This test makes sure that the "du" command is not invoked on every call to getUsed. */ 
+public class TestDU extends TestCase {
+  final static private File DU_DIR = new File(".");
+  final static private File DU_FILE1 = new File(DU_DIR, "tmp1");
+  final static private File DU_FILE2 = new File(DU_DIR, "tmp2");
+  
+  /** Create a file slightly larger than 1KB. */
+  private void createFile( File newFile ) throws IOException {
+    newFile.createNewFile();
+    RandomAccessFile file = new RandomAccessFile(newFile, "rw");
+    file.seek(1024);
+    file.writeBytes("du test du test");
+    file.getFD().sync();
+    file.close();
+  }
+  
+  /** delete a file */
+  private void rmFile(File file) {
+    if(file.exists()) {
+      assertTrue(file.delete());
+    }
+  }
+
+  /** The interval argument is in minutes. */
+  private void testDU(long interval) throws IOException {
+    rmFile(DU_FILE1);
+    rmFile(DU_FILE2);
+    try {
+      createFile(DU_FILE1);
+      DU du = new DU(DU_DIR, interval*60000);
+      long oldUsedSpace = du.getUsed();
+      assertTrue(oldUsedSpace>0); // make sure that du is called
+      createFile(DU_FILE2);
+      if(interval>0) {
+        assertEquals( oldUsedSpace, du.getUsed());  // du does not get called
+      } else {
+        assertTrue( oldUsedSpace < du.getUsed());   // du gets called again
+      }
+    } finally {
+      rmFile(DU_FILE1);
+      rmFile(DU_FILE2);
+    }
+  }
+
+  public void testDU() throws Exception {
+    testDU(Long.MIN_VALUE/60000);  // test a negative interval
+    testDU(0L);  // test a zero interval
+    testDU(10L); // interval equal to 10mins
+    testDU(System.currentTimeMillis()/60000+60); // test a very big interval
+  }
+    
+}