
HADOOP-890. Replace dashes in metric names with underscores. Contributed by Nigel.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk@496859 13f79535-47bb-0310-9956-ffa450edef68
Doug Cutting 18 years ago
Parent
commit
d77ac32e88

+ 4 - 0
CHANGES.txt

@@ -21,6 +21,10 @@ Trunk (unreleased changes)
     the TreeSet of block locations with an ArrayList.
     (Raghu Angadi via cutting)
 
+ 6. HADOOP-890.  Replace dashes in metric names with underscores,
+    for better compatibility with some monitoring systems.
+    (Nigel Daley via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 

+ 6 - 6
src/java/org/apache/hadoop/dfs/DataNode.java

@@ -132,32 +132,32 @@ public class DataNode implements FSConstants, Runnable {
       
       synchronized void readBytes(int nbytes) {
         bytesRead += nbytes;
-        Metrics.report(metricsRecord, "bytes-read", bytesRead);
+        Metrics.report(metricsRecord, "bytes_read", bytesRead);
       }
       
       synchronized void wroteBytes(int nbytes) {
         bytesWritten += nbytes;
-        Metrics.report(metricsRecord, "bytes-written", bytesWritten);
+        Metrics.report(metricsRecord, "bytes_written", bytesWritten);
       }
       
       synchronized void readBlocks(int nblocks) {
         blocksRead += nblocks;
-        Metrics.report(metricsRecord, "blocks-read", blocksRead);
+        Metrics.report(metricsRecord, "blocks_read", blocksRead);
       }
       
       synchronized void wroteBlocks(int nblocks) {
         blocksWritten += nblocks;
-        Metrics.report(metricsRecord, "blocks-written", blocksWritten);
+        Metrics.report(metricsRecord, "blocks_written", blocksWritten);
       }
       
       synchronized void replicatedBlocks(int nblocks) {
         blocksReplicated += nblocks;
-        Metrics.report(metricsRecord, "blocks-replicated", blocksReplicated);
+        Metrics.report(metricsRecord, "blocks_replicated", blocksReplicated);
       }
       
       synchronized void removedBlocks(int nblocks) {
         blocksRemoved += nblocks;
-        Metrics.report(metricsRecord, "blocks-removed", blocksRemoved);
+        Metrics.report(metricsRecord, "blocks_removed", blocksRemoved);
       }
     }
     

+ 1 - 1
src/java/org/apache/hadoop/dfs/FSDirectory.java

@@ -223,7 +223,7 @@ class FSDirectory implements FSConstants {
                     v.add(blocks[i]);
                 }
             }
-            Metrics.report(metricsRecord, "files-deleted", ++numFilesDeleted);
+            Metrics.report(metricsRecord, "files_deleted", ++numFilesDeleted);
             for (Iterator it = children.values().iterator(); it.hasNext(); ) {
                 INode child = (INode) it.next();
                 child.collectSubtreeBlocks(v);

+ 4 - 4
src/java/org/apache/hadoop/dfs/NameNode.java

@@ -113,20 +113,20 @@ public class NameNode implements ClientProtocol, DatanodeProtocol, FSConstants {
       }
       
       synchronized void createFile() {
-        Metrics.report(metricsRecord, "files-created", ++numFilesCreated);
+        Metrics.report(metricsRecord, "files_created", ++numFilesCreated);
       }
       
       synchronized void openFile() {
-        Metrics.report(metricsRecord, "files-opened", ++numFilesOpened);
+        Metrics.report(metricsRecord, "files_opened", ++numFilesOpened);
       }
       
       synchronized void renameFile() {
-        Metrics.report(metricsRecord, "files-renamed", ++numFilesRenamed);
+        Metrics.report(metricsRecord, "files_renamed", ++numFilesRenamed);
       }
       
       synchronized void listFile(int nfiles) {
         numFilesListed += nfiles;
-        Metrics.report(metricsRecord, "files-listed", numFilesListed);
+        Metrics.report(metricsRecord, "files_listed", numFilesListed);
       }
     }
     

+ 6 - 6
src/java/org/apache/hadoop/mapred/JobTracker.java

@@ -386,32 +386,32 @@ public class JobTracker implements MRConstants, InterTrackerProtocol, JobSubmiss
       }
       
       synchronized void launchMap() {
-        Metrics.report(metricsRecord, "maps-launched",
+        Metrics.report(metricsRecord, "maps_launched",
             ++numMapTasksLaunched);
       }
       
       synchronized void completeMap() {
-        Metrics.report(metricsRecord, "maps-completed",
+        Metrics.report(metricsRecord, "maps_completed",
             ++numMapTasksCompleted);
       }
       
       synchronized void launchReduce() {
-        Metrics.report(metricsRecord, "reduces-launched",
+        Metrics.report(metricsRecord, "reduces_launched",
             ++numReduceTasksLaunched);
       }
       
       synchronized void completeReduce() {
-        Metrics.report(metricsRecord, "reduces-completed",
+        Metrics.report(metricsRecord, "reduces_completed",
             ++numReduceTasksCompleted);
       }
       
       synchronized void submitJob() {
-        Metrics.report(metricsRecord, "jobs-submitted",
+        Metrics.report(metricsRecord, "jobs_submitted",
             ++numJobsSubmitted);
       }
       
       synchronized void completeJob() {
-        Metrics.report(metricsRecord, "jobs-completed",
+        Metrics.report(metricsRecord, "jobs_completed",
             ++numJobsCompleted);
       }
     }

+ 4 - 4
src/java/org/apache/hadoop/mapred/MapTask.java

@@ -71,15 +71,15 @@ class MapTask extends Task {
     }
     
     synchronized void mapInput(long numBytes) {
-      Metrics.report(metricsRecord, "input-records", ++numInputRecords);
+      Metrics.report(metricsRecord, "input_records", ++numInputRecords);
       numInputBytes += numBytes;
-      Metrics.report(metricsRecord, "input-bytes", numInputBytes);
+      Metrics.report(metricsRecord, "input_bytes", numInputBytes);
     }
     
     synchronized void mapOutput(long numBytes) {
-      Metrics.report(metricsRecord, "output-records", ++numOutputRecords);
+      Metrics.report(metricsRecord, "output_records", ++numOutputRecords);
       numOutputBytes += numBytes;
-      Metrics.report(metricsRecord, "output-bytes", numOutputBytes);
+      Metrics.report(metricsRecord, "output_bytes", numOutputBytes);
     }
     
   }

+ 2 - 2
src/java/org/apache/hadoop/mapred/ReduceTask.java

@@ -54,11 +54,11 @@ class ReduceTask extends Task {
     }
     
     synchronized void reduceInput() {
-      Metrics.report(metricsRecord, "input-records", ++numInputRecords);
+      Metrics.report(metricsRecord, "input_records", ++numInputRecords);
     }
     
     synchronized void reduceOutput() {
-      Metrics.report(metricsRecord, "output-records", ++numOutputRecords);
+      Metrics.report(metricsRecord, "output_records", ++numOutputRecords);
     }
   }
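
Note: every hunk above applies the same dash-to-underscore mapping by hand. As a hypothetical illustration only (this helper is not part of the commit and not part of the Hadoop Metrics API), the mapping amounts to:

// Hypothetical sketch of the naming convention adopted by this commit:
// metric names use underscores instead of dashes, so they remain valid
// identifiers in monitoring systems that restrict characters in names.
public final class MetricNames {
    private MetricNames() {}

    // Replace every dash with an underscore, e.g. "bytes-read" -> "bytes_read".
    public static String normalize(String name) {
        return name.replace('-', '_');
    }

    public static void main(String[] args) {
        System.out.println(normalize("bytes-read"));     // prints bytes_read
        System.out.println(normalize("maps-launched"));  // prints maps_launched
    }
}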