
Merge -r 703922:703923 from trunk to main to move the change log entry for HADOOP-4228 into the release 0.18.2 section

git-svn-id: https://svn.apache.org/repos/asf/hadoop/core/branches/branch-0.19@703927 13f79535-47bb-0310-9956-ffa450edef68
Hairong Kuang, 17 years ago
commit f5b1191fb6

+ 3 - 0
CHANGES.txt

@@ -845,6 +845,9 @@ Release 0.18.2 - Unreleased
     HADOOP-4314. Simulated datanodes should not include blocks that are still
     being written in their block report. (Raghu Angadi)
 
+    HADOOP-4228. dfs datanode metrics, bytes_read and bytes_written, overflow
+    due to an incorrect type being used. (hairong)
+
 Release 0.18.1 - 2008-09-17
 
   IMPROVEMENTS
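The entry above records a plain 32-bit overflow: a busy datanode can move more than 2^31 - 1 bytes (about 2 GiB) well within a reporting window, at which point an int counter wraps negative. A minimal standalone Java sketch of the failure mode and the widened fix (illustrative only, not Hadoop code):

    public class CounterOverflowDemo {
      public static void main(String[] args) {
        // Old behaviour: a 32-bit accumulator wraps once it passes ~2 GiB.
        int narrow = Integer.MAX_VALUE;  // 2147483647 bytes
        narrow += 1;                     // integer overflow
        System.out.println(narrow);      // prints -2147483648

        // The fix: accumulate in a 64-bit long instead.
        long wide = (long) Integer.MAX_VALUE + 1L;
        System.out.println(wide);        // prints 2147483648
      }
    }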

+ 5 - 4
src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java

@@ -24,6 +24,7 @@ import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
 import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
+import org.apache.hadoop.metrics.util.MetricsLongValue;
 import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 
 
@@ -44,10 +45,10 @@ public class DataNodeMetrics implements Updater {
   private DataNodeStatistics datanodeStats;
   
   
-  public MetricsTimeVaryingInt bytesWritten = 
-                      new MetricsTimeVaryingInt("bytes_written");
-  public MetricsTimeVaryingInt bytesRead = 
-                      new MetricsTimeVaryingInt("bytes_read");
+  public MetricsLongValue bytesWritten = 
+                      new MetricsLongValue("bytes_written");
+  public MetricsLongValue bytesRead = 
+                      new MetricsLongValue("bytes_read");
   public MetricsTimeVaryingInt blocksWritten = 
                       new MetricsTimeVaryingInt("blocks_written");
   public MetricsTimeVaryingInt blocksRead = 

+ 10 - 3
src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeStatistics.java

@@ -34,7 +34,7 @@ public class DataNodeStatistics implements DataNodeStatisticsMBean {
    * This constructs and registers the DataNodeStatisticsMBean
    * @param dataNodeMetrics - the metrics from which the mbean gets its info
    */
-  DataNodeStatistics(DataNodeMetrics dataNodeMetrics, String storageId) {
+  public DataNodeStatistics(DataNodeMetrics dataNodeMetrics, String storageId) {
     myMetrics = dataNodeMetrics;
     String serverName;
     if (storageId.equals("")) {// Temp fix for the uninitialized storage
@@ -92,10 +92,17 @@ public class DataNodeStatistics implements DataNodeStatisticsMBean {
   /**
    * @inheritDoc
    */
-  public int getBytesRead() {
-    return myMetrics.bytesRead.getPreviousIntervalValue();
+  public long getBytesRead() {
+    return myMetrics.bytesRead.get();
   }
 
+  /**
+   * {@inheritDoc}
+   */
+  public long getBytesWritten() {
+    return myMetrics.bytesWritten.get();
+  }
+  
   /**
    * @inheritDoc
    */

+ 7 - 1
src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeStatisticsMBean.java

@@ -44,11 +44,17 @@ package org.apache.hadoop.hdfs.server.datanode.metrics;
  */
 public interface DataNodeStatisticsMBean {
   
+  /**
+   *   Number of bytes written in the last interval
+   * @return number of bytes written
+   */
+  long getBytesWritten();
+  
   /**
    *   Number of bytes read in the last interval
    * @return number of bytes read
    */
-  int getBytesRead();
+  long getBytesRead();
   
   /**
    *   Number of blocks written in the last interval

+ 53 - 0
src/test/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java

@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode;
+
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
+import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeStatistics;
+import org.apache.hadoop.conf.Configuration;
+import junit.framework.TestCase;
+
+public class TestDataNodeMetrics extends TestCase {
+  
+  public void testDataNodeMetrics() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
+    MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
+    try {
+      FileSystem fs = cluster.getFileSystem();
+      final long LONG_FILE_LEN = Integer.MAX_VALUE + 1L;
+      DFSTestUtil.createFile(fs, new Path("/tmp.txt"),
+          LONG_FILE_LEN, (short)1, 1L);
+      List<DataNode> datanodes = cluster.getDataNodes();
+      assertEquals(1, datanodes.size());
+      DataNode datanode = datanodes.get(0);
+      DataNodeMetrics metrics = datanode.getMetrics();
+      DataNodeStatistics statistics = new DataNodeStatistics(
+          metrics, datanode.dnRegistration.storageID);
+      assertEquals(LONG_FILE_LEN, statistics.getBytesWritten());
+    } finally {
+      cluster.shutdown();
+    }
+  }
+}
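The new regression test sits exactly on the overflow boundary: with the simulated dataset enabled no real blocks hit disk, so writing Integer.MAX_VALUE + 1 bytes is cheap, and the closing assertion could never pass against the old int-typed metric because the wrapped counter cannot equal LONG_FILE_LEN. With the Ant build of this era it should run on its own via something like ant test -Dtestcase=TestDataNodeMetrics (target name hedged).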