
commit 69743903766896b29354e8ecc1b60a15e74b57b8
Author: Arun C Murthy <acmurthy@apache.org>
Date: Fri Jul 23 18:58:10 2010 -0700

HADOOP-6728. Re-design and overhaul of the Metrics framework. Contributed by Luke Lu.

+++ b/YAHOO-CHANGES.txt
+ HADOOP-6728. Re-design and overhaul of the Metrics framework. (Luke Lu via
+ acmurthy)
+


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-patches@1077597 13f79535-47bb-0310-9956-ffa450edef68

Owen O'Malley 14 years ago
parent
commit
8c4d365301
100 changed files with 5970 additions and 4488 deletions
  1. conf/hadoop-metrics.properties (+0, -54)
  2. conf/hadoop-metrics2.properties (+3, -0)
  3. conf/log4j.properties (+1, -1)
  4. ivy.xml (+10, -0)
  5. ivy/libraries.properties (+4, -3)
  6. src/core/org/apache/hadoop/ipc/RPC.java (+3, -19)
  7. src/core/org/apache/hadoop/ipc/Server.java (+10, -18)
  8. src/core/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java (+0, -78)
  9. src/core/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java (+0, -72)
  10. src/core/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java (+0, -82)
  11. src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java (+210, -0)
  12. src/core/org/apache/hadoop/ipc/metrics/RpcMetrics.java (+0, -152)
  13. src/core/org/apache/hadoop/ipc/metrics/RpcMgt.java (+0, -119)
  14. src/core/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java (+0, -105)
  15. src/core/org/apache/hadoop/log/EventCounter.java (+1, -3)
  16. src/core/org/apache/hadoop/metrics/ContextFactory.java (+0, -193)
  17. src/core/org/apache/hadoop/metrics/MetricsContext.java (+0, -107)
  18. src/core/org/apache/hadoop/metrics/MetricsRecord.java (+0, -246)
  19. src/core/org/apache/hadoop/metrics/MetricsUtil.java (+0, -100)
  20. src/core/org/apache/hadoop/metrics/file/FileContext.java (+0, -151)
  21. src/core/org/apache/hadoop/metrics/file/package.html (+0, -43)
  22. src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java (+0, -243)
  23. src/core/org/apache/hadoop/metrics/ganglia/package.html (+0, -74)
  24. src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java (+0, -191)
  25. src/core/org/apache/hadoop/metrics/package.html (+0, -159)
  26. src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java (+0, -427)
  27. src/core/org/apache/hadoop/metrics/spi/CompositeContext.java (+0, -186)
  28. src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java (+0, -275)
  29. src/core/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java (+0, -83)
  30. src/core/org/apache/hadoop/metrics/spi/OutputRecord.java (+0, -72)
  31. src/core/org/apache/hadoop/metrics/spi/Util.java (+0, -67)
  32. src/core/org/apache/hadoop/metrics/spi/package.html (+0, -36)
  33. src/core/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java (+0, -226)
  34. src/core/org/apache/hadoop/metrics/util/MetricsIntValue.java (+0, -104)
  35. src/core/org/apache/hadoop/metrics/util/MetricsLongValue.java (+0, -88)
  36. src/core/org/apache/hadoop/metrics/util/MetricsRegistry.java (+0, -85)
  37. src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java (+0, -128)
  38. src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java (+0, -124)
  39. src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java (+0, -196)
  40. src/core/org/apache/hadoop/metrics2/Metric.java (+108, -0)
  41. src/core/org/apache/hadoop/metrics2/MetricCounter.java (+38, -0)
  42. src/core/org/apache/hadoop/metrics2/MetricGauge.java (+37, -0)
  43. src/core/org/apache/hadoop/metrics2/MetricsBuilder.java (+9, -9)
  44. src/core/org/apache/hadoop/metrics2/MetricsException.java (+51, -0)
  45. src/core/org/apache/hadoop/metrics2/MetricsFilter.java (+60, -0)
  46. src/core/org/apache/hadoop/metrics2/MetricsPlugin.java (+34, -0)
  47. src/core/org/apache/hadoop/metrics2/MetricsRecord.java (+55, -0)
  48. src/core/org/apache/hadoop/metrics2/MetricsRecordBuilder.java (+121, -0)
  49. src/core/org/apache/hadoop/metrics2/MetricsSink.java (+37, -0)
  50. src/core/org/apache/hadoop/metrics2/MetricsSource.java (+33, -0)
  51. src/core/org/apache/hadoop/metrics2/MetricsSystem.java (+101, -0)
  52. src/core/org/apache/hadoop/metrics2/MetricsSystemMXBean.java (+52, -0)
  53. src/core/org/apache/hadoop/metrics2/MetricsTag.java (+99, -0)
  54. src/core/org/apache/hadoop/metrics2/MetricsVisitor.java (+68, -0)
  55. src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java (+165, -0)
  56. src/core/org/apache/hadoop/metrics2/filter/GlobFilter.java (+34, -0)
  57. src/core/org/apache/hadoop/metrics2/filter/RegexFilter.java (+33, -0)
  58. src/core/org/apache/hadoop/metrics2/impl/Consumer.java (+26, -0)
  59. src/core/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java (+107, -0)
  60. src/core/org/apache/hadoop/metrics2/impl/MetricCounterInt.java (+17, -28)
  61. src/core/org/apache/hadoop/metrics2/impl/MetricCounterLong.java (+41, -0)
  62. src/core/org/apache/hadoop/metrics2/impl/MetricGaugeDouble.java (+41, -0)
  63. src/core/org/apache/hadoop/metrics2/impl/MetricGaugeFloat.java (+41, -0)
  64. src/core/org/apache/hadoop/metrics2/impl/MetricGaugeInt.java (+41, -0)
  65. src/core/org/apache/hadoop/metrics2/impl/MetricGaugeLong.java (+41, -0)
  66. src/core/org/apache/hadoop/metrics2/impl/MetricsBuffer.java (+56, -0)
  67. src/core/org/apache/hadoop/metrics2/impl/MetricsBufferBuilder.java (+12, -22)
  68. src/core/org/apache/hadoop/metrics2/impl/MetricsBuilderImpl.java (+67, -0)
  69. src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java (+202, -0)
  70. src/core/org/apache/hadoop/metrics2/impl/MetricsConfigException.java (+41, -42)
  71. src/core/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java (+145, -0)
  72. src/core/org/apache/hadoop/metrics2/impl/MetricsRecordFiltered.java (+73, -0)
  73. src/core/org/apache/hadoop/metrics2/impl/MetricsRecordImpl.java (+111, -0)
  74. src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java (+197, -0)
  75. src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java (+254, -0)
  76. src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java (+514, -0)
  77. src/core/org/apache/hadoop/metrics2/impl/SinkQueue.java (+162, -0)
  78. src/core/org/apache/hadoop/metrics2/lib/AbstractMetricsSource.java (+60, -0)
  79. src/core/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java (+99, -0)
  80. src/core/org/apache/hadoop/metrics2/lib/MetricMutable.java (+75, -0)
  81. src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounter.java (+41, -0)
  82. src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounterInt.java (+63, -0)
  83. src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounterLong.java (+63, -0)
  84. src/core/org/apache/hadoop/metrics2/lib/MetricMutableFactory.java (+156, -0)
  85. src/core/org/apache/hadoop/metrics2/lib/MetricMutableGauge.java (+47, -0)
  86. src/core/org/apache/hadoop/metrics2/lib/MetricMutableGaugeInt.java (+86, -0)
  87. src/core/org/apache/hadoop/metrics2/lib/MetricMutableGaugeLong.java (+86, -0)
  88. src/core/org/apache/hadoop/metrics2/lib/MetricMutableStat.java (+140, -0)
  89. src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java (+364, -0)
  90. src/core/org/apache/hadoop/metrics2/package.html (+319, -0)
  91. src/core/org/apache/hadoop/metrics2/sink/FileSink.java (+83, -0)
  92. src/core/org/apache/hadoop/metrics2/source/JvmMetricsSource.java (+191, -0)
  93. src/core/org/apache/hadoop/metrics2/util/Contracts.java (+127, -0)
  94. src/core/org/apache/hadoop/metrics2/util/MBeans.java (+25, -23)
  95. src/core/org/apache/hadoop/metrics2/util/SampleStat.java (+167, -0)
  96. src/core/org/apache/hadoop/metrics2/util/TryIterator.java (+120, -0)
  97. src/core/org/apache/hadoop/security/UgiInstrumentation.java (+56, -0)
  98. src/core/org/apache/hadoop/security/UserGroupInformation.java (+8, -49)
  99. src/core/org/apache/hadoop/util/StringUtils.java (+24, -1)
  100. src/hdfs/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java (+4, -4)

+ 0 - 54
conf/hadoop-metrics.properties

@@ -1,54 +0,0 @@
-# Configuration of the "dfs" context for null
-dfs.class=org.apache.hadoop.metrics.spi.NullContext
-
-# Configuration of the "dfs" context for file
-#dfs.class=org.apache.hadoop.metrics.file.FileContext
-#dfs.period=10
-#dfs.fileName=/tmp/dfsmetrics.log
-
-# Configuration of the "dfs" context for ganglia
-# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# dfs.period=10
-# dfs.servers=localhost:8649
-
-
-# Configuration of the "mapred" context for null
-mapred.class=org.apache.hadoop.metrics.spi.NullContext
-
-# Configuration of the "mapred" context for file
-#mapred.class=org.apache.hadoop.metrics.file.FileContext
-#mapred.period=10
-#mapred.fileName=/tmp/mrmetrics.log
-
-# Configuration of the "mapred" context for ganglia
-# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# mapred.period=10
-# mapred.servers=localhost:8649
-
-
-# Configuration of the "jvm" context for null
-jvm.class=org.apache.hadoop.metrics.spi.NullContext
-
-# Configuration of the "jvm" context for file
-#jvm.class=org.apache.hadoop.metrics.file.FileContext
-#jvm.period=10
-#jvm.fileName=/tmp/jvmmetrics.log
-
-# Configuration of the "jvm" context for ganglia
-# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# jvm.period=10
-# jvm.servers=localhost:8649
-
-
-# Configuration of the "ugi" context for null
-ugi.class=org.apache.hadoop.metrics.spi.NullContext
-
-# Configuration of the "ugi" context for file
-#ugi.class=org.apache.hadoop.metrics.file.FileContext
-#ugi.period=10
-#ugi.fileName=/tmp/ugimetrics.log
-
-# Configuration of the "ugi" context for ganglia
-# ugi.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# ugi.period=10
-# ugi.servers=localhost:8649

+ 3 - 0
conf/hadoop-metrics2.properties

@@ -0,0 +1,3 @@
+#[prefix].[source|sink|jmx].[instance].[options]
+#namenode.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
+#namenode.sink.file0.filename=nn.out
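The commented lines above are the entire configuration schema of the new framework. As a hedged illustration only (the second sink instance and its filename are invented for this sketch; the FileSink class and the filename option are the ones shown in the diff), an uncommented configuration following that schema might look like:

    # Sketch only, not part of this commit: two FileSink instances under the
    # "namenode" prefix, each writing metrics to its own file.
    namenode.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
    namenode.sink.file0.filename=nn-metrics.out
    namenode.sink.file1.class=org.apache.hadoop.metrics2.sink.FileSink
    namenode.sink.file1.filename=nn-metrics-extra.out

Here "namenode" is the prefix (typically the process name), "sink" selects the plugin kind, and "file0"/"file1" are arbitrary instance names that let several sinks of the same class coexist.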

+ 1 - 1
conf/log4j.properties

@@ -115,7 +115,7 @@ log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop Metrics.
 #
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+log4j.appender.EventCounter=org.apache.hadoop.log.EventCounter
 
 #
 # Job Summary Appender

+ 10 - 0
ivy.xml

@@ -126,6 +126,16 @@
       rev="${commons-net.version}"
       conf="ftp->default"/>
 
+    <dependency org="commons-configuration"
+      name="commons-configuration"
+      rev="${commons-configuration.version}"
+      conf="common->default"/>
+
+    <dependency org="org.apache.commons"
+      name="commons-math"
+      rev="${commons-math.version}"
+      conf="common->default"/>
+
     <!--Configuration: Jetty -->
 
 <!-- <dependency org="javax.servlet"

+ 4 - 3
ivy/libraries.properties

@@ -27,11 +27,13 @@ checkstyle.version=4.2
 commons-cli.version=1.2
 commons-codec.version=1.4
 commons-collections.version=3.1
+commons-configuration.version=1.6
 commons-daemon.version=1.0.1
 commons-httpclient.version=3.0.1
 commons-lang.version=2.4
 commons-logging.version=1.0.4
 commons-logging-api.version=1.0.4
+commons-math.version=2.1
 commons-el.version=1.0
 commons-fileupload.version=1.2
 commons-io.version=1.4
@@ -41,8 +43,7 @@ coreplugin.version=1.3.2
 
 hsqldb.version=1.8.0.10
 
-#ivy.version=2.0.0-beta2
-ivy.version=2.0.0-rc2
+ivy.version=2.1.0
 
 jasper.version=5.5.12
 #not able to figureout the version of jsp & jsp-api version to get it resolved throught ivy
@@ -61,7 +62,7 @@ kfs.version=0.1
 log4j.version=1.2.15
 lucene-core.version=2.3.1
 
-mockito-all.version=1.8.0
+mockito-all.version=1.8.5
 
 oro.version=2.0.8
 

+ 3 - 19
src/core/org/apache/hadoop/ipc/RPC.java

@@ -41,7 +41,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.conf.*;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 
 import org.apache.hadoop.net.NetUtils;
 
@@ -528,24 +527,9 @@ public class RPC {
                     " queueTime= " + qTime +
                     " procesingTime= " + processingTime);
         }
-        rpcMetrics.rpcQueueTime.inc(qTime);
-        rpcMetrics.rpcProcessingTime.inc(processingTime);
-
-        MetricsTimeVaryingRate m =
-         (MetricsTimeVaryingRate) rpcDetailedMetrics.registry.get(call.getMethodName());
-      	if (m == null) {
-      	  try {
-      	    m = new MetricsTimeVaryingRate(call.getMethodName(),
-      	                                        rpcDetailedMetrics.registry);
-      	  } catch (IllegalArgumentException iae) {
-      	    // the metrics has been registered; re-fetch the handle
-      	    LOG.info("Error register " + call.getMethodName(), iae);
-      	    m = (MetricsTimeVaryingRate) rpcDetailedMetrics.registry.get(
-      	        call.getMethodName());
-      	  }
-      	}
-        m.inc(processingTime);
-
+        rpcMetrics.addRpcQueueTime(qTime);
+        rpcMetrics.addRpcProcessingTime(processingTime);
+        rpcMetrics.addRpcProcessingTime(call.getMethodName(), processingTime);
         if (verbose) log("Return: "+value);
 
         return new ObjectWritable(method.getReturnType(), value);

+ 10 - 18
src/core/org/apache/hadoop/ipc/Server.java

@@ -66,8 +66,7 @@ import org.apache.hadoop.io.IntWritable;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.*;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
-import org.apache.hadoop.ipc.metrics.RpcMetrics;
+import org.apache.hadoop.ipc.metrics.RpcInstrumentation;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
@@ -187,8 +186,7 @@ public abstract class Server {
                                                   // connections to nuke
                                                   //during a cleanup
   
-  protected RpcMetrics rpcMetrics;
-  protected RpcDetailedMetrics rpcDetailedMetrics;
+  protected RpcInstrumentation rpcMetrics;
   
   private Configuration conf;
   private SecretManager<TokenIdentifier> secretManager;
@@ -245,7 +243,7 @@ public abstract class Server {
    * Returns a handle to the rpcMetrics (required in tests)
    * @return rpc metrics
    */
-  public RpcMetrics getRpcMetrics() {
+  public RpcInstrumentation getRpcMetrics() {
     return rpcMetrics;
   }
 
@@ -1019,7 +1017,7 @@ public abstract class Server {
           }
           doSaslReply(SaslStatus.ERROR, null, sendToClient.getClass().getName(), 
               sendToClient.getLocalizedMessage());
-          rpcMetrics.authenticationFailures.inc();
+          rpcMetrics.incrAuthenticationFailures();
           String clientIP = this.toString();
           // attempting user could be null
           AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser);
@@ -1039,7 +1037,7 @@ public abstract class Server {
           useWrap = qop != null && !"auth".equalsIgnoreCase(qop);
           user = getAuthorizedUgi(saslServer.getAuthorizationID());
           LOG.info("SASL server successfully authenticated client: " + user);
-          rpcMetrics.authenticationSuccesses.inc();
+          rpcMetrics.incrAuthenticationSuccesses();
           AUDITLOG.info(AUTH_SUCCESSFULL_FOR + user);
           saslContextEstablished = true;
         }
@@ -1319,9 +1317,9 @@ public abstract class Server {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Successfully authorized " + header);
         }
-        rpcMetrics.authorizationSuccesses.inc();
+        rpcMetrics.incrAuthorizationSuccesses();
       } catch (AuthorizationException ae) {
-        rpcMetrics.authorizationFailures.inc();
+        rpcMetrics.incrAuthorizationFailures();
         setupResponse(authFailedResponse, authFailedCall, Status.FATAL, null,
             ae.getClass().getName(), ae.getMessage());
         responder.doRespond(authFailedCall);
@@ -1480,10 +1478,7 @@ public abstract class Server {
     // Start the listener here and let it bind to the port
     listener = new Listener();
     this.port = listener.getAddress().getPort();    
-    this.rpcMetrics = new RpcMetrics(serverName,
-                          Integer.toString(this.port), this);
-    this.rpcDetailedMetrics = new RpcDetailedMetrics(serverName,
-                            Integer.toString(this.port));
+    this.rpcMetrics = RpcInstrumentation.create(serverName, this.port);
     this.tcpNoDelay = conf.getBoolean("ipc.server.tcpnodelay", false);
 
     // Create the responder here
@@ -1603,9 +1598,6 @@ public abstract class Server {
     if (this.rpcMetrics != null) {
       this.rpcMetrics.shutdown();
     }
-    if (this.rpcDetailedMetrics != null) {
-      this.rpcDetailedMetrics.shutdown();
-    }
   }
 
   /** Wait for the server to be stopped.
@@ -1704,7 +1696,7 @@ public abstract class Server {
     int count =  (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
                  channel.write(buffer) : channelIO(null, channel, buffer);
     if (count > 0) {
-      rpcMetrics.sentBytes.inc(count);
+      rpcMetrics.incrSentBytes(count);
     }
     return count;
   }
@@ -1724,7 +1716,7 @@ public abstract class Server {
     int count = (buffer.remaining() <= NIO_BUFFER_LIMIT) ?
                 channel.read(buffer) : channelIO(channel, null, buffer);
     if (count > 0) {
-      rpcMetrics.receivedBytes.inc(count);
+      rpcMetrics.incrReceivedBytes(count);
     }
     return count;
   }

+ 0 - 78
src/core/org/apache/hadoop/ipc/metrics/RpcActivityMBean.java

@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-import javax.management.ObjectName;
-
-import org.apache.hadoop.metrics.util.MBeanUtil;
-import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-
-
-
-/**
- * 
- * This is the JMX MBean for reporting the RPC layer Activity.
- * The MBean is register using the name
- *        "hadoop:service=<RpcServiceName>,name=RpcActivityForPort<port>"
- * 
- * Many of the activity metrics are sampled and averaged on an interval 
- * which can be specified in the metrics config file.
- * <p>
- * For the metrics that are sampled and averaged, one must specify 
- * a metrics context that does periodic update calls. Most metrics contexts do.
- * The default Null metrics context however does NOT. So if you aren't
- * using any other metrics context then you can turn on the viewing and averaging
- * of sampled metrics by  specifying the following two lines
- *  in the hadoop-meterics.properties file:
- *  <pre>
- *        rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
- *        rpc.period=10
- *  </pre>
- *<p>
- * Note that the metrics are collected regardless of the context used.
- * The context with the update thread is used to average the data periodically
- *
- *
- *
- * Impl details: We use a dynamic mbean that gets the list of the metrics
- * from the metrics registry passed as an argument to the constructor
- */
-
-public class RpcActivityMBean extends MetricsDynamicMBeanBase {
-  private final ObjectName mbeanName;
-
-  /**
-   * 
-   * @param mr - the metrics registry that has all the metrics
-   * @param serviceName - the service name for the rpc service 
-   * @param port - the rpc port.
-   */
-  public RpcActivityMBean(final MetricsRegistry mr, final String serviceName,
-      final String port) {
-    super(mr, "Rpc layer statistics");
-    mbeanName = MBeanUtil.registerMBean(serviceName,
-          "RpcActivityForPort" + port, this);
-  }
-  
-
-  public void shutdown() {
-    if (mbeanName != null)
-      MBeanUtil.unregisterMBean(mbeanName);
-  }
-}

+ 0 - 72
src/core/org/apache/hadoop/ipc/metrics/RpcDetailedActivityMBean.java

@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-import javax.management.ObjectName;
-
-import org.apache.hadoop.metrics.util.MBeanUtil;
-import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-
-/**
- * 
- * This is the JMX MBean for reporting the RPC layer Activity. The MBean is
- * register using the name
- * "hadoop:service=<RpcServiceName>,name=RpcDetailedActivityForPort<port>"
- * 
- * Many of the activity metrics are sampled and averaged on an interval which
- * can be specified in the metrics config file.
- * <p>
- * For the metrics that are sampled and averaged, one must specify a metrics
- * context that does periodic update calls. Most metrics contexts do. The
- * default Null metrics context however does NOT. So if you aren't using any
- * other metrics context then you can turn on the viewing and averaging of
- * sampled metrics by specifying the following two lines in the
- * hadoop-meterics.properties file:
- * 
- * <pre>
- *        rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
- *        rpc.period=10
- * </pre>
- *<p>
- * Note that the metrics are collected regardless of the context used. The
- * context with the update thread is used to average the data periodically
- * 
- * Impl details: We use a dynamic mbean that gets the list of the metrics from
- * the metrics registry passed as an argument to the constructor
- */
-public class RpcDetailedActivityMBean extends MetricsDynamicMBeanBase {
-  private final ObjectName mbeanName;
-
-  /**
-   * @param mr - the metrics registry that has all the metrics
-   * @param serviceName - the service name for the rpc service
-   * @param port - the rpc port.
-   */
-  public RpcDetailedActivityMBean(final MetricsRegistry mr,
-      final String serviceName, final String port) {
-    super(mr, "Rpc layer detailed statistics");
-    mbeanName = MBeanUtil.registerMBean(serviceName,
-        "RpcDetailedActivityForPort" + port, this);
-  }
-
-  public void shutdown() {
-    if (mbeanName != null)
-      MBeanUtil.unregisterMBean(mbeanName);
-  }
-}

+ 0 - 82
src/core/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java

@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
-
-/**
- * 
- * This class is for maintaining  the various RPC method related statistics
- * and publishing them through the metrics interfaces.
- * This also registers the JMX MBean for RPC.
- */
-public class RpcDetailedMetrics implements Updater {
-  public final MetricsRegistry registry = new MetricsRegistry();
-  private final MetricsRecord metricsRecord;
-  private static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class);
-  RpcDetailedActivityMBean rpcMBean;
-  
-  /**
-   * Statically added metrics to expose at least one metrics, without
-   * which other dynamically added metrics are not exposed over JMX.
-   */
-  final MetricsTimeVaryingRate getProtocolVersion = 
-    new MetricsTimeVaryingRate("getProtocolVersion", registry);
-  
-  public RpcDetailedMetrics(final String hostName, final String port) {
-    MetricsContext context = MetricsUtil.getContext("rpc");
-    metricsRecord = MetricsUtil.createRecord(context, "detailed-metrics");
-
-    metricsRecord.setTag("port", port);
-
-    LOG.info("Initializing RPC Metrics with hostName=" 
-        + hostName + ", port=" + port);
-
-    context.registerUpdater(this);
-    
-    // Need to clean up the interface to RpcMgt - don't need both metrics and server params
-    rpcMBean = new RpcDetailedActivityMBean(registry, hostName, port);
-  }
-  
-  
-  /**
-   * Push the metrics to the monitoring subsystem on doUpdate() call.
-   */
-  public void doUpdates(final MetricsContext context) {
-    
-    synchronized (this) {
-      for (MetricsBase m : registry.getMetricsList()) {
-        m.pushMetric(metricsRecord);
-      }
-    }
-    metricsRecord.update();
-  }
-
-  public void shutdown() {
-    if (rpcMBean != null) 
-      rpcMBean.shutdown();
-  }
-}

+ 210 - 0
src/core/org/apache/hadoop/ipc/metrics/RpcInstrumentation.java

@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ipc.metrics;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.AbstractMetricsSource;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricMutableCounterInt;
+import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MetricMutableGaugeInt;
+import org.apache.hadoop.metrics2.lib.MetricMutableStat;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+
+/**
+ * The RPC metrics instrumentation
+ */
+public class RpcInstrumentation implements MetricsSource {
+
+  static final Log LOG = LogFactory.getLog(RpcInstrumentation.class);
+
+  final MetricsRegistry registry = new MetricsRegistry("rpc");
+  final MetricMutableCounterInt authenticationSuccesses =
+      registry.newCounter("rpcAuthenticationSuccesses",
+                          "RPC authentication successes count", 0);
+  final MetricMutableCounterInt authenticationFailures =
+      registry.newCounter("rpcAuthenticationFailures",
+                          "RPC authentication failures count", 0);
+  final MetricMutableCounterInt authorizationSuccesses =
+      registry.newCounter("rpcAuthorizationSuccesses",
+                          "RPC authorization successes count", 0);
+  final MetricMutableCounterInt authorizationFailures =
+      registry.newCounter("rpcAuthorizationFailures",
+                          "RPC authorization failures count", 0);
+  final MetricMutableCounterLong receivedBytes =
+      registry.newCounter("ReceivedBytes", "RPC received bytes count", 0L);
+  final MetricMutableCounterLong sentBytes =
+      registry.newCounter("SentBytes", "RPC sent bytes count", 0L);
+  final MetricMutableStat rpcQueueTime = registry.newStat("RpcQueueTime",
+      "RPC queue time stats", "ops", "time");
+  final MetricMutableStat rpcProcessingTime = registry.newStat(
+      "RpcProcessingTime", "RPC processing time", "ops", "time");
+  final MetricMutableGaugeInt numOpenConnections = registry.newGauge(
+      "NumOpenConnections", "Number of open connections", 0);
+  final MetricMutableGaugeInt callQueueLen = registry.newGauge("callQueueLen",
+      "RPC call queue length", 0);
+
+  final Detailed detailed;
+
+  RpcInstrumentation(String serverName, int port) {
+    String portStr = String.valueOf(port);
+    registry.setContext("rpc").tag("port", "RPC port", portStr);
+    detailed = new Detailed(portStr);
+  }
+
+  @Override
+  public void getMetrics(MetricsBuilder builder, boolean all) {
+    registry.snapshot(builder.addRecord(registry.name()), all);
+  }
+
+  /**
+   * Create an RPC instrumentation object
+   * @param serverName  name of the server
+   * @param port  the RPC port
+   * @return the instrumentation object
+   */
+  public static RpcInstrumentation create(String serverName, int port) {
+    return create(serverName, port, DefaultMetricsSystem.INSTANCE);
+  }
+
+  /**
+   * Create an RPC instrumentation object
+   * Mostly useful for testing.
+   * @param serverName  name of the server
+   * @param port  the RPC port
+   * @param ms  the metrics system object
+   * @return the instrumentation object
+   */
+  public static RpcInstrumentation create(String serverName, int port,
+                                          MetricsSystem ms) {
+    RpcInstrumentation rpc = new RpcInstrumentation(serverName, port);
+    ms.register("RpcDetailedActivityForPort"+ port, "Per call", rpc.detailed());
+    return ms.register("RpcActivityForPort"+ port, "Aggregate metrics", rpc);
+  }
+
+  /**
+   * @return the detailed (per call) metrics source for RPC
+   */
+  public MetricsSource detailed() {
+    return detailed;
+  }
+
+  // Start of public instrumentation methods that could be extracted to an
+  // abstract class if we decide to allow custom instrumentation classes a la
+  // JobTrackerInstrumenation.
+
+  /**
+   * One authentication failure event
+   */
+  public void incrAuthenticationFailures() {
+    this.authenticationFailures.incr();
+  }
+
+  /**
+   * One authentication success event
+   */
+  public void incrAuthenticationSuccesses() {
+    this.authenticationSuccesses.incr();
+  }
+
+  /**
+   * One authorization success event
+   */
+  public void incrAuthorizationSuccesses() {
+    this.authorizationSuccesses.incr();
+  }
+
+  /**
+   * One authorization failure event
+   */
+  public void incrAuthorizationFailures() {
+    this.authorizationFailures.incr();
+  }
+
+  /**
+   * Shutdown the instrumentation for the process
+   */
+  public void shutdown() {
+    LOG.info("shut down");
+  }
+
+  /**
+   * Increment sent bytes by count
+   * @param count to increment
+   */
+  public void incrSentBytes(int count) {
+    this.sentBytes.incr(count);
+  }
+
+  /**
+   * Increment received bytes by count
+   * @param count to increment
+   */
+  public void incrReceivedBytes(int count) {
+    this.receivedBytes.incr(count);
+  }
+
+  /**
+   * Add an RPC queue time sample
+   * @param qTime
+   */
+  public void addRpcQueueTime(int qTime) {
+    this.rpcQueueTime.add(qTime);
+  }
+
+  /**
+   * Add an RPC processing time sample
+   * @param processingTime
+   */
+  public void addRpcProcessingTime(int processingTime) {
+    this.rpcProcessingTime.add(processingTime);
+  }
+
+  /**
+   * Add an RPC processing time sample for a particular RPC method
+   * @param methodName  method name of the RPC
+   * @param processingTime  elapsed processing time of the RPC
+   */
+  public void addRpcProcessingTime(String methodName, int processingTime) {
+    detailed.addRpcProcessingTime(methodName, processingTime);
+  }
+
+  /**
+   * Use a separate source for detailed (per call) RPC metrics for
+   * easy and efficient filtering
+   */
+  public static class Detailed extends AbstractMetricsSource {
+
+    Detailed(String port) {
+      super("rpcdetailed");
+      registry.setContext("rpcdetailed").tag("port", "RPC port", port);
+    }
+
+    public synchronized void addRpcProcessingTime(String methodName,
+                                                  int processingTime) {
+      registry.add(methodName, processingTime);
+    }
+
+  }
+
+}
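
For orientation, here is a minimal, hedged usage sketch of the facade defined above (not part of the commit; the "namenode" service name, the port, and the timing values are invented). It calls only methods that appear in this file, in the same way the Server.java and RPC.java hunks earlier in this commit call them:

    // Hedged sketch, not part of this commit: exercises the public
    // RpcInstrumentation API introduced above with made-up values.
    import org.apache.hadoop.ipc.metrics.RpcInstrumentation;

    public class RpcInstrumentationSketch {
      public static void main(String[] args) {
        // create() builds the facade and registers both the aggregate source
        // ("RpcActivityForPort8020") and the per-method source
        // ("RpcDetailedActivityForPort8020") with the default metrics system.
        RpcInstrumentation rpcMetrics = RpcInstrumentation.create("namenode", 8020);

        rpcMetrics.incrAuthenticationSuccesses();        // a successful SASL handshake
        rpcMetrics.incrReceivedBytes(512);               // bytes read from a client
        rpcMetrics.addRpcQueueTime(3);                   // ms a call waited in the queue
        rpcMetrics.addRpcProcessingTime(12);             // ms spent executing the call
        rpcMetrics.addRpcProcessingTime("getProtocolVersion", 12); // per-method stat

        rpcMetrics.shutdown();                           // as Server.stop() does above
      }
    }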

+ 0 - 152
src/core/org/apache/hadoop/ipc/metrics/RpcMetrics.java

@@ -1,152 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsIntValue;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
-
-/**
- * 
- * This class is for maintaining  the various RPC statistics
- * and publishing them through the metrics interfaces.
- * This also registers the JMX MBean for RPC.
- * <p>
- * This class has a number of metrics variables that are publicly accessible;
- * these variables (objects) have methods to update their values;
- * for example:
- *  <p> {@link #rpcQueueTime}.inc(time)
- *
- */
-public class RpcMetrics implements Updater {
-  private final MetricsRegistry registry = new MetricsRegistry();
-  private final MetricsRecord metricsRecord;
-  private final Server myServer;
-  private static final Log LOG = LogFactory.getLog(RpcMetrics.class);
-  RpcActivityMBean rpcMBean;
-  
-  public RpcMetrics(final String hostName, final String port,
-      final Server server) {
-    myServer = server;
-    MetricsContext context = MetricsUtil.getContext("rpc");
-    metricsRecord = MetricsUtil.createRecord(context, "metrics");
-
-    metricsRecord.setTag("port", port);
-
-    LOG.info("Initializing RPC Metrics with hostName=" 
-        + hostName + ", port=" + port);
-
-    context.registerUpdater(this);
-    
-    // Need to clean up the interface to RpcMgt - don't need both metrics and server params
-    rpcMBean = new RpcActivityMBean(registry, hostName, port);
-  }
-  
-  
-  /**
-   * The metrics variables are public:
-   *  - they can be set directly by calling their set/inc methods
-   *  -they can also be read directly - e.g. JMX does this.
-   */
-
-  /**
-   * metrics - number of bytes received
-   */
-  public final MetricsTimeVaryingLong receivedBytes = 
-         new MetricsTimeVaryingLong("ReceivedBytes", registry);
-  /**
-   * metrics - number of bytes sent
-   */
-  public final MetricsTimeVaryingLong sentBytes = 
-         new MetricsTimeVaryingLong("SentBytes", registry);
-  /**
-   * metrics - rpc queue time
-   */
-  public final MetricsTimeVaryingRate rpcQueueTime =
-          new MetricsTimeVaryingRate("RpcQueueTime", registry);
-  /**
-   * metrics - rpc processing time
-   */
-  public final MetricsTimeVaryingRate rpcProcessingTime =
-          new MetricsTimeVaryingRate("RpcProcessingTime", registry);
-  /**
-   * metrics - number of open connections
-   */
-  public final MetricsIntValue numOpenConnections = 
-          new MetricsIntValue("NumOpenConnections", registry);
-  /**
-   * metrics - length of the queue
-   */
-  public final MetricsIntValue callQueueLen = 
-          new MetricsIntValue("callQueueLen", registry);
-  /**
-   * metrics - number of failed authentications
-   */
-  public final MetricsTimeVaryingInt authenticationFailures = 
-          new MetricsTimeVaryingInt("rpcAuthenticationFailures", registry);
-  /**
-   * metrics - number of successful authentications
-   */
-  public final MetricsTimeVaryingInt authenticationSuccesses = 
-          new MetricsTimeVaryingInt("rpcAuthenticationSuccesses", registry);
-  /**
-   * metrics - number of failed authorizations
-   */
-  public final MetricsTimeVaryingInt authorizationFailures = 
-          new MetricsTimeVaryingInt("rpcAuthorizationFailures", registry);
-  /**
-   * metrics - number of successful authorizations
-   */
-  public final MetricsTimeVaryingInt authorizationSuccesses = 
-         new MetricsTimeVaryingInt("rpcAuthorizationSuccesses", registry);
-  
-  /**
-   * Push the metrics to the monitoring subsystem on doUpdate() call.
-   */
-  public void doUpdates(final MetricsContext context) {
-    
-    synchronized (this) {
-      // ToFix - fix server to use the following two metrics directly so
-      // the metrics do not have be copied here.
-      numOpenConnections.set(myServer.getNumOpenConnections());
-      callQueueLen.set(myServer.getCallQueueLen());
-      for (MetricsBase m : registry.getMetricsList()) {
-        m.pushMetric(metricsRecord);
-      }
-    }
-    metricsRecord.update();
-  }
-
-  /**
-   * shutdown the metrics
-   */
-  public void shutdown() {
-    if (rpcMBean != null) 
-      rpcMBean.shutdown();
-  }
-}

+ 0 - 119
src/core/org/apache/hadoop/ipc/metrics/RpcMgt.java

@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-
-import javax.management.ObjectName;
-
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.metrics.util.MBeanUtil;
-
-
-/**
- * This class implements the RpcMgt MBean
- *
- */
-class RpcMgt implements RpcMgtMBean {
-  private RpcMetrics myMetrics;
-  private Server myServer;
-  private ObjectName mbeanName;
-  
-  RpcMgt(final String serviceName, final String port,
-                final RpcMetrics metrics, Server server) {
-    myMetrics = metrics;
-    myServer = server;
-    mbeanName = MBeanUtil.registerMBean(serviceName,
-                    "RpcStatisticsForPort" + port, this);
-  }
-
-  public void shutdown() {
-    if (mbeanName != null)
-      MBeanUtil.unregisterMBean(mbeanName);
-  }
-  
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgProcessingTime() {
-    return myMetrics.rpcProcessingTime.getPreviousIntervalAverageTime();
-  }
-  
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgProcessingTimeMax() {
-    return myMetrics.rpcProcessingTime.getMaxTime();
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgProcessingTimeMin() {
-    return myMetrics.rpcProcessingTime.getMinTime();
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgQueueTime() {
-    return myMetrics.rpcQueueTime.getPreviousIntervalAverageTime();
-  }
-  
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgQueueTimeMax() {
-    return myMetrics.rpcQueueTime.getMaxTime();
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public long getRpcOpsAvgQueueTimeMin() {
-    return myMetrics.rpcQueueTime.getMinTime();
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public int getRpcOpsNumber() {
-    return myMetrics.rpcProcessingTime.getPreviousIntervalNumOps() ;
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public int getNumOpenConnections() {
-    return myServer.getNumOpenConnections();
-  }
-  
-  /**
-   * @inheritDoc
-   */
-  public int getCallQueueLen() {
-    return myServer.getCallQueueLen();
-  }
-
-  /**
-   * @inheritDoc
-   */
-  public void resetAllMinMax() {
-    myMetrics.rpcProcessingTime.resetMinMax();
-    myMetrics.rpcQueueTime.resetMinMax();
-  }
-}

+ 0 - 105
src/core/org/apache/hadoop/ipc/metrics/RpcMgtMBean.java

@@ -1,105 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ipc.metrics;
-
-
-/**
- * 
- * This is the JMX management interface for the RPC layer.
- * Many of the statistics are sampled and averaged on an interval 
- * which can be specified in the metrics config file.
- * <p>
- * For the statistics that are sampled and averaged, one must specify 
- * a metrics context that does periodic update calls. Most do.
- * The default Null metrics context however does NOT. So if you aren't
- * using any other metrics context then you can turn on the viewing and averaging
- * of sampled metrics by  specifying the following two lines
- *  in the hadoop-meterics.properties file:
- *  <pre>
- *        rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
- *        rpc.period=10
- *  </pre>
- *<p>
- * Note that the metrics are collected regardless of the context used.
- * The context with the update thread is used to average the data periodically
- *
- */
-public interface RpcMgtMBean {
-  
-  /**
-   * Number of RPC Operations in the last interval
-   * @return number of operations
-   */
-  int getRpcOpsNumber();
-  
-  /**
-   * Average time for RPC Operations in last interval
-   * @return time in msec
-   */
-  long getRpcOpsAvgProcessingTime();
-  
-  /**
-   * The Minimum RPC Operation Processing Time since reset was called
-   * @return time in msec
-   */
-  long getRpcOpsAvgProcessingTimeMin();
-  
-  
-  /**
-   * The Maximum RPC Operation Processing Time since reset was called
-   * @return time in msec
-   */
-  long getRpcOpsAvgProcessingTimeMax();
-  
-  
-  /**
-   * The Average RPC Operation Queued Time in the last interval
-   * @return time in msec
-   */
-  long getRpcOpsAvgQueueTime();
-  
-  
-  /**
-   * The Minimum RPC Operation Queued Time since reset was called
-   * @return time in msec
-   */
-  long getRpcOpsAvgQueueTimeMin();
-  
-  /**
-   * The Maximum RPC Operation Queued Time since reset was called
-   * @return time in msec
-   */
-  long getRpcOpsAvgQueueTimeMax();
-  
-  /**
-   * Reset all min max times
-   */
-  void resetAllMinMax();
-  
-  /**
-   * The number of open RPC conections
-   * @return the number of open rpc connections
-   */
-  public int getNumOpenConnections();
-  
-  /**
-   * The number of rpc calls in the queue.
-   * @return The number of rpc calls in the queue.
-   */
-  public int getCallQueueLen();
-}

+ 1 - 3
src/core/org/apache/hadoop/metrics/jvm/EventCounter.java → src/core/org/apache/hadoop/log/EventCounter.java

@@ -15,10 +15,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metrics.jvm;
+package org.apache.hadoop.log;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Level;
 import org.apache.log4j.spi.LoggingEvent;

+ 0 - 193
src/core/org/apache/hadoop/metrics/ContextFactory.java

@@ -1,193 +0,0 @@
-/*
- * ContextFactory.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Properties;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
-import org.apache.hadoop.metrics.spi.NullContext;
-
-/**
- * Factory class for creating MetricsContext objects.  To obtain an instance
- * of this class, use the static <code>getFactory()</code> method.
- */
-public class ContextFactory {
-    
-  private static final String PROPERTIES_FILE = 
-    "/hadoop-metrics.properties";
-  private static final String CONTEXT_CLASS_SUFFIX =
-    ".class";
-  private static final String DEFAULT_CONTEXT_CLASSNAME =
-    "org.apache.hadoop.metrics.spi.NullContext";
-    
-  private static ContextFactory theFactory = null;
-    
-  private Map<String,Object> attributeMap = new HashMap<String,Object>();
-  private Map<String,MetricsContext> contextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  // Used only when contexts, or the ContextFactory itself, cannot be
-  // created.
-  private static Map<String,MetricsContext> nullContextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  /** Creates a new instance of ContextFactory */
-  protected ContextFactory() {
-  }
-    
-  /**
-   * Returns the value of the named attribute, or null if there is no 
-   * attribute of that name.
-   *
-   * @param attributeName the attribute name
-   * @return the attribute value
-   */
-  public Object getAttribute(String attributeName) {
-    return attributeMap.get(attributeName);
-  }
-    
-  /**
-   * Returns the names of all the factory's attributes.
-   * 
-   * @return the attribute names
-   */
-  public String[] getAttributeNames() {
-    String[] result = new String[attributeMap.size()];
-    int i = 0;
-    // for (String attributeName : attributeMap.keySet()) {
-    Iterator it = attributeMap.keySet().iterator();
-    while (it.hasNext()) {
-      result[i++] = (String) it.next();
-    }
-    return result;
-  }
-    
-  /**
-   * Sets the named factory attribute to the specified value, creating it
-   * if it did not already exist.  If the value is null, this is the same as
-   * calling removeAttribute.
-   *
-   * @param attributeName the attribute name
-   * @param value the new attribute value
-   */
-  public void setAttribute(String attributeName, Object value) {
-    attributeMap.put(attributeName, value);
-  }
-
-  /**
-   * Removes the named attribute if it exists.
-   *
-   * @param attributeName the attribute name
-   */
-  public void removeAttribute(String attributeName) {
-    attributeMap.remove(attributeName);
-  }
-    
-  /**
-   * Returns the named MetricsContext instance, constructing it if necessary 
-   * using the factory's current configuration attributes. <p/>
-   * 
-   * When constructing the instance, if the factory property 
-   * <i>contextName</i>.class</code> exists, 
-   * its value is taken to be the name of the class to instantiate.  Otherwise,
-   * the default is to create an instance of 
-   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
-   * dummy "no-op" context which will cause all metric data to be discarded.
-   * 
-   * @param contextName the name of the context
-   * @return the named MetricsContext
-   */
-  public synchronized MetricsContext getContext(String refName, String contextName)
-      throws IOException, ClassNotFoundException,
-             InstantiationException, IllegalAccessException {
-    MetricsContext metricsContext = contextMap.get(refName);
-    if (metricsContext == null) {
-      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
-      String className = (String) getAttribute(classNameAttribute);
-      if (className == null) {
-        className = DEFAULT_CONTEXT_CLASSNAME;
-      }
-      Class contextClass = Class.forName(className);
-      metricsContext = (MetricsContext) contextClass.newInstance();
-      metricsContext.init(contextName, this);
-      contextMap.put(contextName, metricsContext);
-    }
-    return metricsContext;
-  }
-
-  public synchronized MetricsContext getContext(String contextName)
-    throws IOException, ClassNotFoundException, InstantiationException,
-           IllegalAccessException {
-    return getContext(contextName, contextName);
-  }
-    
-  /**
-   * Returns a "null" context - one which does nothing.
-   */
-  public static synchronized MetricsContext getNullContext(String contextName) {
-    MetricsContext nullContext = nullContextMap.get(contextName);
-    if (nullContext == null) {
-      nullContext = new NullContext();
-      nullContextMap.put(contextName, nullContext);
-    }
-    return nullContext;
-  }
-    
-  /**
-   * Returns the singleton ContextFactory instance, constructing it if 
-   * necessary. <p/>
-   * 
-   * When the instance is constructed, this method checks if the file 
-   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
-   * exists, it must be in the format defined by java.util.Properties, and all 
-   * the properties in the file are set as attributes on the newly created
-   * ContextFactory instance.
-   *
-   * @return the singleton ContextFactory instance
-   */
-  public static synchronized ContextFactory getFactory() throws IOException {
-    if (theFactory == null) {
-      theFactory = new ContextFactory();
-      theFactory.setAttributes();
-    }
-    return theFactory;
-  }
-    
-  private void setAttributes() throws IOException {
-    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
-    if (is != null) {
-      Properties properties = new Properties();
-      properties.load(is);
-      //for (Object propertyNameObj : properties.keySet()) {
-      Iterator it = properties.keySet().iterator();
-      while (it.hasNext()) {
-        String propertyName = (String) it.next();
-        String propertyValue = properties.getProperty(propertyName);
-        setAttribute(propertyName, propertyValue);
-      }
-    }
-  }
-    
-}

+ 0 - 107
src/core/org/apache/hadoop/metrics/MetricsContext.java

@@ -1,107 +0,0 @@
-/*
- * MetricsContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-
-/**
- * The main interface to the metrics package. 
- */
-public interface MetricsContext {
-    
-  /**
-   * Default period in seconds at which data is sent to the metrics system.
-   */
-  public static final int DEFAULT_PERIOD = 5;
-
-  /**
-   * Initialize this context.
-   * @param contextName The given name for this context
-   * @param factory The creator of this context
-   */
-  public void init(String contextName, ContextFactory factory);
-
-  /**
-   * Returns the context name.
-   *
-   * @return the context name
-   */
-  public abstract String getContextName();
-    
-  /**
-   * Starts or restarts monitoring, the emitting of metrics records as they are 
-   * updated. 
-   */
-  public abstract void startMonitoring()
-    throws IOException;
-
-  /**
-   * Stops monitoring.  This does not free any data that the implementation
-   * may have buffered for sending at the next timer event. It
-   * is OK to call <code>startMonitoring()</code> again after calling 
-   * this.
-   * @see #close()
-   */
-  public abstract void stopMonitoring();
-    
-  /**
-   * Returns true if monitoring is currently in progress.
-   */
-  public abstract boolean isMonitoring();
-    
-  /**
-   * Stops monitoring and also frees any buffered data, returning this 
-   * object to its initial state.  
-   */
-  public abstract void close();
-    
-  /**
-   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
-   * Throws an exception if the metrics implementation is configured with a fixed
-   * set of record names and <code>recordName</code> is not in that set.
-   *
-   * @param recordName the name of the record
-   * @throws MetricsException if recordName conflicts with configuration data
-   */
-  public abstract MetricsRecord createRecord(String recordName);
-    
-  /**
-   * Registers a callback to be called at regular time intervals, as 
-   * determined by the implementation-class specific configuration.
-   *
-   * @param updater object to be run periodically; it should updated
-   * some metrics records and then return
-   */
-  public abstract void registerUpdater(Updater updater);
-
-  /**
-   * Removes a callback, if it exists.
-   * 
-   * @param updater object to be removed from the callback list
-   */
-  public abstract void unregisterUpdater(Updater updater);
-  
-  /**
-   * Returns the timer period.
-   */
-  public abstract int getPeriod();
-    
-}
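
For orientation, here is a minimal sketch of how a caller was expected to use this interface, based on the javadoc above: obtain a context, create a record, and register an Updater whose doUpdates() is invoked once per timer period just before records are emitted. The class and names ("myContext", "queueStats", QueueLengthReporter) are illustrative only; the MetricsUtil helper it uses is the one removed further down in this change.

    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.MetricsUtil;
    import org.apache.hadoop.metrics.Updater;

    // Hypothetical reporter built on the (now removed) metrics-1 API.
    public class QueueLengthReporter implements Updater {
      private final MetricsRecord record;
      private volatile int queueLength;

      public QueueLengthReporter() {
        MetricsContext context = MetricsUtil.getContext("myContext");
        record = MetricsUtil.createRecord(context, "queueStats");
        context.registerUpdater(this);   // called every 'period' seconds
      }

      public void setQueueLength(int len) { queueLength = len; }

      // Invoked by the context's timer just before buffered records are sent.
      public void doUpdates(MetricsContext unused) {
        record.setMetric("queueLength", queueLength);
        record.update();                 // buffer the row for the next emit
      }
    }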

+ 0 - 246
src/core/org/apache/hadoop/metrics/MetricsRecord.java

@@ -1,246 +0,0 @@
-/*
- * MetricsRecord.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-/**
- * A named and optionally tagged set of records to be sent to the metrics
- * system. <p/>
- *
- * A record name identifies the kind of data to be reported. For example, a
- * program reporting statistics relating to the disks on a computer might use
- * a record name "diskStats".<p/>
- *
- * A record has zero or more <i>tags</i>. A tag has a name and a value. To
- * continue the example, the "diskStats" record might use a tag named
- * "diskName" to identify a particular disk.  Sometimes it is useful to have
- * more than one tag, so there might also be a "diskType" with value "ide" or
- * "scsi" or whatever.<p/>
- *
- * A record also has zero or more <i>metrics</i>.  These are the named
- * values that are to be reported to the metrics system.  In the "diskStats"
- * example, possible metric names would be "diskPercentFull", "diskPercentBusy", 
- * "kbReadPerSecond", etc.<p/>
- * 
- * The general procedure for using a MetricsRecord is to fill in its tag and
- * metric values, and then call <code>update()</code> to pass the record to the
- * client library.
- * Metric data is not immediately sent to the metrics system
- * each time that <code>update()</code> is called. 
- * An internal table is maintained, identified by the record name. This
- * table has columns 
- * corresponding to the tag and the metric names, and rows 
- * corresponding to each unique set of tag values. An update
- * either modifies an existing row in the table, or adds a new row with a set of
- * tag values that are different from all the other rows.  Note that if there
- * are no tags, then there can be at most one row in the table. <p/>
- * 
- * Once a row is added to the table, its data will be sent to the metrics system 
- * on every timer period, whether or not it has been updated since the previous
- * timer period.  If this is inappropriate, for example if metrics were being
- * reported by some transient object in an application, the <code>remove()</code>
- * method can be used to remove the row and thus stop the data from being
- * sent.<p/>
- *
- * Note that the <code>update()</code> method is atomic.  This means that it is
- * safe for different threads to be updating the same metric.  More precisely,
- * it is OK for different threads to call <code>update()</code> on MetricsRecord instances 
- * with the same set of tag names and tag values.  Different threads should 
- * <b>not</b> use the same MetricsRecord instance at the same time.
- */
-public interface MetricsRecord {
-    
-  /**
-   * Returns the record name. 
-   *
-   * @return the record name
-   */
-  public abstract String getRecordName();
-    
-  /**
-   * Sets the named tag to the specified value.  The tagValue may be null, 
-   * which is treated the same as an empty String.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, String tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, int tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, long tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, short tagValue);
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public abstract void setTag(String tagName, byte tagValue);
-    
-  /**
-   * Removes any tag of the specified name.
-   *
-   * @param tagName name of a tag
-   */
-  public abstract void removeTag(String tagName);
-  
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, int metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, long metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, short metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, byte metricValue);
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void setMetric(String metricName, float metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, int metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, long metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, short metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, byte metricValue);
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public abstract void incrMetric(String metricName, float metricValue);
-    
-  /**
-   * Updates the table of buffered data which is to be sent periodically.
-   * If the tag values match an existing row, that row is updated; 
-   * otherwise, a new row is added.
-   */
-  public abstract void update();
-    
-  /**
-   * Removes, from the buffered data table, all rows having tags 
-   * that equal the tags that have been set on this record. For example,
-   * if there are no tags on this record, all rows for this record name
-   * would be removed.  Or, if there is a single tag on this record, then
-   * just rows containing a tag with the same name and value would be removed.
-   */
-  public abstract void remove();
-    
-}
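
The javadoc above describes the tag/metric/update cycle in the abstract; a small sketch of the "diskStats" example it mentions might look like the following (context, tag and metric names and values are illustrative only):

    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.MetricsUtil;

    public class DiskStatsExample {
      public static void main(String[] args) {
        // One row per unique set of tag values is kept in the internal table
        // and re-emitted every period until remove() is called.
        MetricsContext context = MetricsUtil.getContext("myContext");
        MetricsRecord diskStats = MetricsUtil.createRecord(context, "diskStats");

        diskStats.setTag("diskName", "sda");          // identifies the row
        diskStats.setTag("diskType", "scsi");
        diskStats.setMetric("diskPercentFull", 72);   // absolute value
        diskStats.incrMetric("kbReadPerSecond", 350); // added to the buffered value
        diskStats.update();   // buffers the row; the send happens on the timer

        // When the disk disappears, drop its row so it stops being re-emitted.
        diskStats.remove();
      }
    }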

+ 0 - 100
src/core/org/apache/hadoop/metrics/MetricsUtil.java

@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Utility class to simplify creation and reporting of hadoop metrics.
- *
- * For examples of usage, see NameNodeMetrics.
- * @see org.apache.hadoop.metrics.MetricsRecord
- * @see org.apache.hadoop.metrics.MetricsContext
- * @see org.apache.hadoop.metrics.ContextFactory
- */
-public class MetricsUtil {
-    
-  public static final Log LOG =
-    LogFactory.getLog(MetricsUtil.class);
-
-  /**
-   * Don't allow creation of a new instance of Metrics
-   */
-  private MetricsUtil() {}
-    
-  public static MetricsContext getContext(String contextName) {
-    return getContext(contextName, contextName);
-  }
-
-  /**
-   * Utility method to return the named context.
-   * If the desired context cannot be created for any reason, the exception
-   * is logged, and a null context is returned.
-   */
-  public static MetricsContext getContext(String refName, String contextName) {
-    MetricsContext metricsContext;
-    try {
-      metricsContext =
-        ContextFactory.getFactory().getContext(refName, contextName);
-      if (!metricsContext.isMonitoring()) {
-        metricsContext.startMonitoring();
-      }
-    } catch (Exception ex) {
-      LOG.error("Unable to create metrics context " + contextName, ex);
-      metricsContext = ContextFactory.getNullContext(contextName);
-    }
-    return metricsContext;
-  }
-
-  /**
-   * Utility method to create and return new metrics record instance within the
-   * given context. This record is tagged with the host name.
-   *
-   * @param context the context
-   * @param recordName name of the record
-   * @return newly created metrics record
-   */
-  public static MetricsRecord createRecord(MetricsContext context, 
-                                           String recordName) 
-  {
-    MetricsRecord metricsRecord = context.createRecord(recordName);
-    metricsRecord.setTag("hostName", getHostName());
-    return metricsRecord;        
-  }
-    
-  /**
-   * Returns the host name.  If the host name is unobtainable, logs the
-   * exception and returns "unknown".
-   */
-  private static String getHostName() {
-    String hostName = null;
-    try {
-      hostName = InetAddress.getLocalHost().getHostName();
-    } 
-    catch (UnknownHostException ex) {
-      LOG.info("Unable to obtain hostName", ex);
-      hostName = "unknown";
-    }
-    return hostName;
-  }
-
-}

+ 0 - 151
src/core/org/apache/hadoop/metrics/file/FileContext.java

@@ -1,151 +0,0 @@
-/*
- * FileContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.file;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsException;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-
-/**
- * Metrics context for writing metrics to a file.<p/>
- *
- * This class is configured by setting ContextFactory attributes which in turn
- * are usually configured through a properties file.  All the attributes are
- * prefixed by the contextName. For example, the properties file might contain:
- * <pre>
- * myContextName.fileName=/tmp/metrics.log
- * myContextName.period=5
- * </pre>
- */
-public class FileContext extends AbstractMetricsContext {
-    
-  /* Configuration attribute names */
-  protected static final String FILE_NAME_PROPERTY = "fileName";
-  protected static final String PERIOD_PROPERTY = "period";
-    
-  private File file = null;              // file for metrics to be written to
-  private PrintWriter writer = null;
-    
-  /** Creates a new instance of FileContext */
-  public FileContext() {}
-    
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-        
-    String fileName = getAttribute(FILE_NAME_PROPERTY);
-    if (fileName != null) {
-      file = new File(fileName);
-    }
-        
-    String periodStr = getAttribute(PERIOD_PROPERTY);
-    if (periodStr != null) {
-      int period = 0;
-      try {
-        period = Integer.parseInt(periodStr);
-      } catch (NumberFormatException nfe) {
-      }
-      if (period <= 0) {
-        throw new MetricsException("Invalid period: " + periodStr);
-      }
-      setPeriod(period);
-    }
-  }
-
-  /**
-   * Returns the configured file name, or null.
-   */
-  public String getFileName() {
-    if (file == null) {
-      return null;
-    } else {
-      return file.getName();
-    }
-  }
-    
-  /**
-   * Starts or restarts monitoring, by opening in append-mode, the
-   * file specified by the <code>fileName</code> attribute,
-   * if specified. Otherwise the data will be written to standard
-   * output.
-   */
-  public void startMonitoring()
-    throws IOException 
-  {
-    if (file == null) {
-      writer = new PrintWriter(new BufferedOutputStream(System.out));
-    } else {
-      writer = new PrintWriter(new FileWriter(file, true));
-    }
-    super.startMonitoring();
-  }
-    
-  /**
-   * Stops monitoring, closing the file.
-   * @see #close()
-   */
-  public void stopMonitoring() {
-    super.stopMonitoring();
-        
-    if (writer != null) {
-      writer.close();
-      writer = null;
-    }
-  }
-    
-  /**
-   * Emits a metrics record to a file.
-   */
-  public void emitRecord(String contextName, String recordName, OutputRecord outRec) {
-    writer.print(contextName);
-    writer.print(".");
-    writer.print(recordName);
-    String separator = ": ";
-    for (String tagName : outRec.getTagNames()) {
-      writer.print(separator);
-      separator = ", ";
-      writer.print(tagName);
-      writer.print("=");
-      writer.print(outRec.getTag(tagName));
-    }
-    for (String metricName : outRec.getMetricNames()) {
-      writer.print(separator);
-      separator = ", ";
-      writer.print(metricName);
-      writer.print("=");
-      writer.print(outRec.getMetric(metricName));
-    }
-    writer.println();
-  }
-    
-  /**
-   * Flushes the output writer, forcing updates to disk.
-   */
-  public void flush() {
-    writer.flush();
-  }
-}

+ 0 - 43
src/core/org/apache/hadoop/metrics/file/package.html

@@ -1,43 +0,0 @@
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<body>
-Implementation of the metrics package that writes the metrics to a file.
-Programmers should not normally need to use this package directly. Instead
-they should use org.hadoop.metrics.
-
-<p/>
-These are the implementation specific factory attributes 
-(See ContextFactory.getFactory()):
-
-<dl>
-    <dt><i>contextName</i>.fileName</dt>
-    <dd>The path of the file to which metrics in context <i>contextName</i>
-    are to be appended.  If this attribute is not specified, the metrics
-    are written to standard output by default.</dd>
-    
-    <dt><i>contextName</i>.period</dt>
-    <dd>The period in seconds on which the metric data is written to the
-    file.</dd>
-    
-</dl>
-
-
-</body>
-</html>
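
Putting the two attributes documented above together with the <i>contextName</i>.class selector described in the top-level package documentation further down, a hadoop-metrics.properties entry routing a context to this implementation might look like this (the context name "myContextName" and the file path are examples only):

    # Route the "myContextName" context to the file implementation.
    myContextName.class=org.apache.hadoop.metrics.file.FileContext
    # Append records to this file; omit to write to standard output.
    myContextName.fileName=/tmp/metrics.log
    # Emit buffered records every 5 seconds.
    myContextName.period=5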

+ 0 - 243
src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java

@@ -1,243 +0,0 @@
-/*
- * GangliaContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.ganglia;
-
-import java.io.IOException;
-import java.net.DatagramPacket;
-import java.net.DatagramSocket;
-import java.net.SocketAddress;
-import java.net.SocketException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsException;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-import org.apache.hadoop.metrics.spi.Util;
-
-/**
- * Context for sending metrics to Ganglia.
- * 
- */
-public class GangliaContext extends AbstractMetricsContext {
-    
-  private static final String PERIOD_PROPERTY = "period";
-  private static final String SERVERS_PROPERTY = "servers";
-  private static final String UNITS_PROPERTY = "units";
-  private static final String SLOPE_PROPERTY = "slope";
-  private static final String TMAX_PROPERTY = "tmax";
-  private static final String DMAX_PROPERTY = "dmax";
-    
-  private static final String DEFAULT_UNITS = "";
-  private static final String DEFAULT_SLOPE = "both";
-  private static final int DEFAULT_TMAX = 60;
-  private static final int DEFAULT_DMAX = 0;
-  private static final int DEFAULT_PORT = 8649;
-  private static final int BUFFER_SIZE = 1500;       // as per libgmond.c
-
-  private final Log LOG = LogFactory.getLog(this.getClass());    
-
-  private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
-    
-  static {
-    typeTable.put(String.class, "string");
-    typeTable.put(Byte.class, "int8");
-    typeTable.put(Short.class, "int16");
-    typeTable.put(Integer.class, "int32");
-    typeTable.put(Long.class, "float");
-    typeTable.put(Float.class, "float");
-  }
-    
-  private byte[] buffer = new byte[BUFFER_SIZE];
-  private int offset;
-    
-  private List<? extends SocketAddress> metricsServers;
-  private Map<String,String> unitsTable;
-  private Map<String,String> slopeTable;
-  private Map<String,String> tmaxTable;
-  private Map<String,String> dmaxTable;
-    
-  private DatagramSocket datagramSocket;
-    
-  /** Creates a new instance of GangliaContext */
-  public GangliaContext() {
-  }
-    
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-        
-    String periodStr = getAttribute(PERIOD_PROPERTY);
-    if (periodStr != null) {
-      int period = 0;
-      try {
-        period = Integer.parseInt(periodStr);
-      } catch (NumberFormatException nfe) {
-      }
-      if (period <= 0) {
-        throw new MetricsException("Invalid period: " + periodStr);
-      }
-      setPeriod(period);
-    }
-        
-    metricsServers = 
-      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT); 
-        
-    unitsTable = getAttributeTable(UNITS_PROPERTY);
-    slopeTable = getAttributeTable(SLOPE_PROPERTY);
-    tmaxTable  = getAttributeTable(TMAX_PROPERTY);
-    dmaxTable  = getAttributeTable(DMAX_PROPERTY);
-        
-    try {
-      datagramSocket = new DatagramSocket();
-    }
-    catch (SocketException se) {
-      se.printStackTrace();
-    }
-  }
-
-  public void emitRecord(String contextName, String recordName,
-    OutputRecord outRec) 
-  throws IOException {
-    // Setup so that the records have the proper leader names so they are
-    // unambiguous at the ganglia level, and this prevents a lot of rework
-    StringBuilder sb = new StringBuilder();
-    sb.append(contextName);
-    sb.append('.');
-    sb.append(recordName);
-    sb.append('.');
-    int sbBaseLen = sb.length();
-
-    // emit each metric in turn
-    for (String metricName : outRec.getMetricNames()) {
-      Object metric = outRec.getMetric(metricName);
-      String type = typeTable.get(metric.getClass());
-      if (type != null) {
-        sb.append(metricName);
-        emitMetric(sb.toString(), type, metric.toString());
-        sb.setLength(sbBaseLen);
-      } else {
-        LOG.warn("Unknown metrics type: " + metric.getClass());
-      }
-    }
-  }
-    
-  private void emitMetric(String name, String type,  String value) 
-  throws IOException {
-    String units = getUnits(name);
-    int slope = getSlope(name);
-    int tmax = getTmax(name);
-    int dmax = getDmax(name);
-        
-    offset = 0;
-    xdr_int(0);             // metric_user_defined
-    xdr_string(type);
-    xdr_string(name);
-    xdr_string(value);
-    xdr_string(units);
-    xdr_int(slope);
-    xdr_int(tmax);
-    xdr_int(dmax);
-        
-    for (SocketAddress socketAddress : metricsServers) {
-      DatagramPacket packet = 
-        new DatagramPacket(buffer, offset, socketAddress);
-      datagramSocket.send(packet);
-    }
-  }
-    
-  private String getUnits(String metricName) {
-    String result = unitsTable.get(metricName);
-    if (result == null) {
-      result = DEFAULT_UNITS;
-    }
-    return result;
-  }
-    
-  private int getSlope(String metricName) {
-    String slopeString = slopeTable.get(metricName);
-    if (slopeString == null) {
-      slopeString = DEFAULT_SLOPE; 
-    }
-    return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
-  }
-    
-  private int getTmax(String metricName) {
-    if (tmaxTable == null) {
-      return DEFAULT_TMAX;
-    }
-    String tmaxString = tmaxTable.get(metricName);
-    if (tmaxString == null) {
-      return DEFAULT_TMAX;
-    }
-    else {
-      return Integer.parseInt(tmaxString);
-    }
-  }
-    
-  private int getDmax(String metricName) {
-    String dmaxString = dmaxTable.get(metricName);
-    if (dmaxString == null) {
-      return DEFAULT_DMAX;
-    }
-    else {
-      return Integer.parseInt(dmaxString);
-    }
-  }
-    
-  /**
-   * Puts a string into the buffer by first writing the size of the string
-   * as an int, followed by the bytes of the string, padded if necessary to
-   * a multiple of 4.
-   */
-  private void xdr_string(String s) {
-    byte[] bytes = s.getBytes();
-    int len = bytes.length;
-    xdr_int(len);
-    System.arraycopy(bytes, 0, buffer, offset, len);
-    offset += len;
-    pad();
-  }
-
-  /**
-   * Pads the buffer with zero bytes up to the nearest multiple of 4.
-   */
-  private void pad() {
-    int newOffset = ((offset + 3) / 4) * 4;
-    while (offset < newOffset) {
-      buffer[offset++] = 0;
-    }
-  }
-        
-  /**
-   * Puts an integer into the buffer as 4 bytes, big-endian.
-   */
-  private void xdr_int(int i) {
-    buffer[offset++] = (byte)((i >> 24) & 0xff);
-    buffer[offset++] = (byte)((i >> 16) & 0xff);
-    buffer[offset++] = (byte)((i >> 8) & 0xff);
-    buffer[offset++] = (byte)(i & 0xff);
-  }
-}
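
As a concrete illustration of the XDR layout built by xdr_string/xdr_int above (the value is made up): emitting the string "int32" writes a 4-byte big-endian length of 5, then the five bytes of the string, then three zero bytes of padding, so the buffer offset advances by 12, the next multiple of 4.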

+ 0 - 74
src/core/org/apache/hadoop/metrics/ganglia/package.html

@@ -1,74 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<body>
-<!--
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
--->
-
-Implementation of the metrics package that sends metric data to 
-<a href="http://ganglia.sourceforge.net/">Ganglia</a>.
-Programmers should not normally need to use this package directly. Instead
-they should use org.apache.hadoop.metrics.
-
-<p/>
-These are the implementation specific factory attributes 
-(See ContextFactory.getFactory()):
-
-<dl>
-    <dt><i>contextName</i>.servers</dt>
-    <dd>Space and/or comma separated sequence of servers to which UDP
-    messages should be sent.</dd>
-    
-    <dt><i>contextName</i>.period</dt>
-    <dd>The period in seconds on which the metric data is sent to the
-    server(s).</dd>
-    
-    <dt><i>contextName</i>.units.<i>recordName</i>.<i>metricName</i></dt>
-    <dd>The units for the specified metric in the specified record.</dd>
-    
-    <dt><i>contextName</i>.slope.<i>recordName</i>.<i>metricName</i></dt>
-    <dd>The slope for the specified metric in the specified record.</dd>
-    
-    <dt><i>contextName</i>.tmax.<i>recordName</i>.<i>metricName</i></dt>
-    <dd>The tmax for the specified metric in the specified record.</dd>
-    
-    <dt><i>contextName</i>.dmax.<i>recordName</i>.<i>metricName</i></dt>
-    <dd>The dmax for the specified metric in the specified record.</dd>
-    
-</dl>
-
-
-</body>
-</html>
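
For concreteness, a hadoop-metrics.properties fragment using the attributes listed above might look like the following; the context name "dfs", the host names, and the record/metric names and values are illustrative only (8649 is the default port used by GangliaContext when none is given):

    # Route the "dfs" context to Ganglia and send UDP packets to two gmond hosts.
    dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
    dfs.servers=gmond-a.example.com:8649, gmond-b.example.com:8649
    dfs.period=10
    # Optional per-metric tuning; keys end with <recordName>.<metricName>.
    dfs.units.diskStats.kbReadPerSecond=KB/s
    dfs.slope.diskStats.kbReadPerSecond=both
    dfs.tmax.diskStats.kbReadPerSecond=60
    dfs.dmax.diskStats.kbReadPerSecond=0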

+ 0 - 191
src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java

@@ -1,191 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.jvm;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryMXBean;
-import java.lang.management.MemoryUsage;
-import java.lang.management.ThreadInfo;
-import java.lang.management.ThreadMXBean;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-
-import static java.lang.Thread.State.*;
-import java.lang.management.GarbageCollectorMXBean;
-import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * Singleton class which reports Java Virtual Machine metrics to the metrics API.  
- * Any application can create an instance of this class in order to emit
- * Java VM metrics.  
- */
-public class JvmMetrics implements Updater {
-    
-    private static final float M = 1024*1024;
-    private static JvmMetrics theInstance = null;
-    private static Log log = LogFactory.getLog(JvmMetrics.class);
-    
-    private MetricsRecord metrics;
-    
-    // garbage collection counters
-    private long gcCount = 0;
-    private long gcTimeMillis = 0;
-    
-    // logging event counters
-    private long fatalCount = 0;
-    private long errorCount = 0;
-    private long warnCount  = 0;
-    private long infoCount  = 0;
-    
-    public synchronized static JvmMetrics init(String processName, String sessionId) {
-      return init(processName, sessionId, "metrics");
-    }
-    
-    public synchronized static JvmMetrics init(String processName, String sessionId,
-      String recordName) {
-        if (theInstance != null) {
-            log.info("Cannot initialize JVM Metrics with processName=" + 
-                     processName + ", sessionId=" + sessionId + 
-                     " - already initialized");
-        }
-        else {
-            log.info("Initializing JVM Metrics with processName=" 
-                    + processName + ", sessionId=" + sessionId);
-            theInstance = new JvmMetrics(processName, sessionId, recordName);
-        }
-        return theInstance;
-    }
-    
-    /** Creates a new instance of JvmMetrics */
-    private JvmMetrics(String processName, String sessionId,
-      String recordName) {
-        MetricsContext context = MetricsUtil.getContext("jvm");
-        metrics = MetricsUtil.createRecord(context, recordName);
-        metrics.setTag("processName", processName);
-        metrics.setTag("sessionId", sessionId);
-        context.registerUpdater(this);
-    }
-    
-    /**
-     * This will be called periodically (with the period being configuration
-     * dependent).
-     */
-    public void doUpdates(MetricsContext context) {
-        doMemoryUpdates();
-        doGarbageCollectionUpdates();
-        doThreadUpdates();
-        doEventCountUpdates();
-        metrics.update();
-    }
-    
-    private void doMemoryUpdates() {
-        MemoryMXBean memoryMXBean =
-               ManagementFactory.getMemoryMXBean();
-        MemoryUsage memNonHeap =
-                memoryMXBean.getNonHeapMemoryUsage();
-        MemoryUsage memHeap =
-                memoryMXBean.getHeapMemoryUsage();
-        metrics.setMetric("memNonHeapUsedM", memNonHeap.getUsed()/M);
-        metrics.setMetric("memNonHeapCommittedM", memNonHeap.getCommitted()/M);
-        metrics.setMetric("memHeapUsedM", memHeap.getUsed()/M);
-        metrics.setMetric("memHeapCommittedM", memHeap.getCommitted()/M);
-    }
-    
-    private void doGarbageCollectionUpdates() {
-        List<GarbageCollectorMXBean> gcBeans =
-                ManagementFactory.getGarbageCollectorMXBeans();
-        long count = 0;
-        long timeMillis = 0;
-        for (GarbageCollectorMXBean gcBean : gcBeans) {
-            count += gcBean.getCollectionCount();
-            timeMillis += gcBean.getCollectionTime();
-        }
-        metrics.incrMetric("gcCount", (int)(count - gcCount));
-        metrics.incrMetric("gcTimeMillis", (int)(timeMillis - gcTimeMillis));
-        
-        gcCount = count;
-        gcTimeMillis = timeMillis;
-    }
-    
-    private void doThreadUpdates() {
-        ThreadMXBean threadMXBean =
-                ManagementFactory.getThreadMXBean();
-        long threadIds[] = 
-                threadMXBean.getAllThreadIds();
-        ThreadInfo[] threadInfos =
-                threadMXBean.getThreadInfo(threadIds, 0);
-        
-        int threadsNew = 0;
-        int threadsRunnable = 0;
-        int threadsBlocked = 0;
-        int threadsWaiting = 0;
-        int threadsTimedWaiting = 0;
-        int threadsTerminated = 0;
-        
-        for (ThreadInfo threadInfo : threadInfos) {
-            // threadInfo is null if the thread is not alive or doesn't exist
-            if (threadInfo == null) continue;
-            Thread.State state = threadInfo.getThreadState();
-            if (state == NEW) {
-                threadsNew++;
-            } 
-            else if (state == RUNNABLE) {
-                threadsRunnable++;
-            }
-            else if (state == BLOCKED) {
-                threadsBlocked++;
-            }
-            else if (state == WAITING) {
-                threadsWaiting++;
-            } 
-            else if (state == TIMED_WAITING) {
-                threadsTimedWaiting++;
-            }
-            else if (state == TERMINATED) {
-                threadsTerminated++;
-            }
-        }
-        metrics.setMetric("threadsNew", threadsNew);
-        metrics.setMetric("threadsRunnable", threadsRunnable);
-        metrics.setMetric("threadsBlocked", threadsBlocked);
-        metrics.setMetric("threadsWaiting", threadsWaiting);
-        metrics.setMetric("threadsTimedWaiting", threadsTimedWaiting);
-        metrics.setMetric("threadsTerminated", threadsTerminated);
-    }
-    
-    private void doEventCountUpdates() {
-        long newFatal = EventCounter.getFatal();
-        long newError = EventCounter.getError();
-        long newWarn  = EventCounter.getWarn();
-        long newInfo  = EventCounter.getInfo();
-        
-        metrics.incrMetric("logFatal", (int)(newFatal - fatalCount));
-        metrics.incrMetric("logError", (int)(newError - errorCount));
-        metrics.incrMetric("logWarn",  (int)(newWarn - warnCount));
-        metrics.incrMetric("logInfo",  (int)(newInfo - infoCount));
-        
-        fatalCount = newFatal;
-        errorCount = newError;
-        warnCount  = newWarn;
-        infoCount  = newInfo;
-    }
-}
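
A daemon would typically wire this in with a single call at startup; a minimal sketch based on the init signatures above (the class, process name, and session id are placeholders, not Hadoop defaults):

    import org.apache.hadoop.metrics.jvm.JvmMetrics;

    public class MyDaemon {
      public static void main(String[] args) {
        // Registers the singleton as an Updater on the "jvm" context; memory,
        // GC, thread, and log-event metrics are then emitted every period.
        JvmMetrics.init("MyDaemon", "session-0");
        // ... start the rest of the daemon ...
      }
    }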

+ 0 - 159
src/core/org/apache/hadoop/metrics/package.html

@@ -1,159 +0,0 @@
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-  <head>
-    <title>org.apache.hadoop.metrics</title>
-  </head>
-<body>
-This package defines an API for reporting performance metric information.
-<p/>
-The API is abstract so that it can be implemented on top of
-a variety of metrics client libraries.  The choice of 
-client library is a configuration option, and different 
-modules within the same application can use
-different metrics implementation libraries.
-<p/>
-Sub-packages:
-<dl>
-    <dt><code>org.apache.hadoop.metrics.spi</code></dt>
-    <dd>The abstract Server Provider Interface package. Those wishing to
-    integrate the metrics API with a particular metrics client library should 
-    extend this package.</dd>
-    
-    <dt><code>org.apache.hadoop.metrics.file</code></dt>
-    <dd>An implementation package which writes the metric data to 
-    a file, or sends it to the standard output stream.</dd>
- 
-    <dt> <code>org.apache.hadoop.metrics.ganglia</code></dt>
-    <dd>An implementation package which sends metric data to 
-    <a href="http://ganglia.sourceforge.net/">Ganglia</a>.</dd>
-</dl>
-
-<h3>Introduction to the Metrics API</h3>
-
-Here is a simple example of how to use this package to report a single
-metric value:
-<pre>
-    private ContextFactory contextFactory = ContextFactory.getFactory();
-    
-    void reportMyMetric(float myMetric) {
-        MetricsContext myContext = contextFactory.getContext("myContext");
-        MetricsRecord myRecord = myContext.getRecord("myRecord");
-        myRecord.setMetric("myMetric", myMetric);
-        myRecord.update();
-    }
-</pre>
-  
-In this example there are three names:
-<dl>
-  <dt><i>myContext</i></dt>
-  <dd>The context name will typically identify either the application, or else a
-  module within an application or library.</dd>
-  
-  <dt><i>myRecord</i></dt>
-  <dd>The record name generally identifies some entity for which a set of
-  metrics are to be reported.  For example, you could have a record named 
-  "cacheStats" for reporting a number of statistics relating to the usage of
-  some cache in your application.</dd>
-  
-  <dt><i>myMetric</i></dt>
-  <dd>This identifies a particular metric.  For example, you might have metrics
-  named "cache_hits" and "cache_misses".
-  </dd>
-</dl>
-
-<h3>Tags</h3>
-
-In some cases it is useful to have multiple records with the same name. For 
-example, suppose that you want to report statistics about each disk on a computer. 
-In this case, the record name would be something like "diskStats", but you also
-need to identify the disk, which is done by adding a <i>tag</i> to the record.
-The code could look something like this:
-<pre>
-    private MetricsRecord diskStats =
-            contextFactory.getContext("myContext").getRecord("diskStats");
-            
-    void reportDiskMetrics(String diskName, float diskBusy, float diskUsed) {
-        diskStats.setTag("diskName", diskName);
-        diskStats.setMetric("diskBusy", diskBusy);
-        diskStats.setMetric("diskUsed", diskUsed);
-        diskStats.update();
-    }
-</pre>
-
-<h3>Buffering and Callbacks</h3>
-
-Data is not sent immediately to the metrics system when 
-<code>MetricsRecord.update()</code> is called. Instead it is stored in an
-internal table, and the contents of the table are sent periodically.
-This can be important for two reasons:
-<ol>
-    <li>It means that a programmer is free to put calls to this API in an 
-    inner loop, since updates can be very frequent without slowing down
-    the application significantly.</li>
-    <li>Some implementations can gain efficiency by combining many metrics 
-    into a single UDP message.</li>
-</ol>
-
-The API provides a timer-based callback via the 
-<code>registerUpdater()</code> method.  The benefit of this
-versus using <code>java.util.Timer</code> is that the callbacks will be done 
-immediately before sending the data, making the data as current as possible.
-
-<h3>Configuration</h3>
-
-It is possible to programmatically examine and modify configuration data
-before creating a context, like this:
-<pre>
-    ContextFactory factory = ContextFactory.getFactory();
-    ... examine and/or modify factory attributes ...
-    MetricsContext context = factory.getContext("myContext");
-</pre>
-The factory attributes can be examined and modified using the following
-<code>ContextFactory</code>methods:
-<ul>
-    <li><code>Object getAttribute(String attributeName)</code></li>
-    <li><code>String[] getAttributeNames()</code></li>
-    <li><code>void setAttribute(String name, Object value)</code></li>
-    <li><code>void removeAttribute(attributeName)</code></li>
-</ul>
-
-<p/>
-<code>ContextFactory.getFactory()</code> initializes the factory attributes by
-reading the properties file <code>hadoop-metrics.properties</code> if it exists 
-on the class path.
-
-<p/>
-A factory attribute named:
-<pre>
-<i>contextName</i>.class
-</pre>
-should have as its value the fully qualified name of the class to be 
-instantiated by a call to the <code>ContextFactory</code> method
-<code>getContext(<i>contextName</i>)</code>.  If this factory attribute is not 
-specified, the default is to instantiate 
-<code>org.apache.hadoop.metrics.file.FileContext</code>.
-
-<p/>
-Other factory attributes are specific to a particular implementation of this 
-API and are documented elsewhere.  For example, configuration attributes for
-the file and Ganglia implementations can be found in the javadoc for 
-their respective packages.
-</body>
-</html>
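
Filling in the "examine and/or modify factory attributes" placeholder in the configuration snippet above, a programmatic override might look like this; the attribute names follow the file implementation documented earlier, the values are examples, and the broad throws clause simply covers the checked exceptions the factory methods may declare:

    import org.apache.hadoop.metrics.ContextFactory;
    import org.apache.hadoop.metrics.MetricsContext;

    public class ProgrammaticConfigExample {
      public static void main(String[] args) throws Exception {
        ContextFactory factory = ContextFactory.getFactory();
        // Same keys as hadoop-metrics.properties: <contextName>.<attribute>
        factory.setAttribute("myContext.class",
            "org.apache.hadoop.metrics.file.FileContext");
        factory.setAttribute("myContext.fileName", "/tmp/metrics.log");
        factory.setAttribute("myContext.period", "5");
        MetricsContext context = factory.getContext("myContext");
        context.startMonitoring();
      }
    }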

+ 0 - 427
src/core/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java

@@ -1,427 +0,0 @@
-/*
- * AbstractMetricsContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.spi;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.TreeMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsException;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.Updater;
-
-/**
- * The main class of the Service Provider Interface.  This class should be
- * extended in order to integrate the Metrics API with a specific metrics
- * client library. <p/>
- *
- * This class implements the internal table of metric data, and the timer
- * on which data is to be sent to the metrics system.  Subclasses must
- * override the abstract <code>emitRecord</code> method in order to transmit
- * the data. <p/>
- */
-public abstract class AbstractMetricsContext implements MetricsContext {
-    
-  private int period = MetricsContext.DEFAULT_PERIOD;
-  private Timer timer = null;
-    
-  private Set<Updater> updaters = new HashSet<Updater>(1);
-  private volatile boolean isMonitoring = false;
-    
-  private ContextFactory factory = null;
-  private String contextName = null;
-    
-  static class TagMap extends TreeMap<String,Object> {
-    private static final long serialVersionUID = 3546309335061952993L;
-    TagMap() {
-      super();
-    }
-    TagMap(TagMap orig) {
-      super(orig);
-    }
-    /**
-     * Returns true if this tagmap contains every tag in other.
-     */
-    public boolean containsAll(TagMap other) {
-      for (Map.Entry<String,Object> entry : other.entrySet()) {
-        Object value = get(entry.getKey());
-        if (value == null || !value.equals(entry.getValue())) {
-          // either key does not exist here, or the value is different
-          return false;
-        }
-      }
-      return true;
-    }
-  }
-  
-  static class MetricMap extends TreeMap<String,Number> {
-    private static final long serialVersionUID = -7495051861141631609L;
-  }
-            
-  static class RecordMap extends HashMap<TagMap,MetricMap> {
-    private static final long serialVersionUID = 259835619700264611L;
-  }
-    
-  private Map<String,RecordMap> bufferedData = new HashMap<String,RecordMap>();
-    
-
-  /**
-   * Creates a new instance of AbstractMetricsContext
-   */
-  protected AbstractMetricsContext() {
-  }
-    
-  /**
-   * Initializes the context.
-   */
-  public void init(String contextName, ContextFactory factory) 
-  {
-    this.contextName = contextName;
-    this.factory = factory;
-  }
-    
-  /**
-   * Convenience method for subclasses to access factory attributes.
-   */
-  protected String getAttribute(String attributeName) {
-    String factoryAttribute = contextName + "." + attributeName;
-    return (String) factory.getAttribute(factoryAttribute);  
-  }
-    
-  /**
-   * Returns an attribute-value map derived from the factory attributes
-   * by finding all factory attributes that begin with 
-   * <i>contextName</i>.<i>tableName</i>.  The returned map consists of
-   * those attributes with the contextName and tableName stripped off.
-   */
-  protected Map<String,String> getAttributeTable(String tableName) {
-    String prefix = contextName + "." + tableName + ".";
-    Map<String,String> result = new HashMap<String,String>();
-    for (String attributeName : factory.getAttributeNames()) {
-      if (attributeName.startsWith(prefix)) {
-        String name = attributeName.substring(prefix.length());
-        String value = (String) factory.getAttribute(attributeName);
-        result.put(name, value);
-      }
-    }
-    return result;
-  }
-    
-  /**
-   * Returns the context name.
-   */
-  public String getContextName() {
-    return contextName;
-  }
-    
-  /**
-   * Returns the factory by which this context was created.
-   */
-  public ContextFactory getContextFactory() {
-    return factory;
-  }
-    
-  /**
-   * Starts or restarts monitoring, the emitting of metrics records.
-   */
-  public synchronized void startMonitoring()
-    throws IOException {
-    if (!isMonitoring) {
-      startTimer();
-      isMonitoring = true;
-    }
-  }
-    
-  /**
-   * Stops monitoring.  This does not free buffered data. 
-   * @see #close()
-   */
-  public synchronized void stopMonitoring() {
-    if (isMonitoring) {
-      stopTimer();
-      isMonitoring = false;
-    }
-  }
-    
-  /**
-   * Returns true if monitoring is currently in progress.
-   */
-  public boolean isMonitoring() {
-    return isMonitoring;
-  }
-    
-  /**
-   * Stops monitoring and frees buffered data, returning this
-   * object to its initial state.  
-   */
-  public synchronized void close() {
-    stopMonitoring();
-    clearUpdaters();
-  } 
-    
-  /**
-   * Creates a new AbstractMetricsRecord instance with the given <code>recordName</code>.
-   * Throws an exception if the metrics implementation is configured with a fixed
-   * set of record names and <code>recordName</code> is not in that set.
-   * 
-   * @param recordName the name of the record
-   * @throws MetricsException if recordName conflicts with configuration data
-   */
-  public final synchronized MetricsRecord createRecord(String recordName) {
-    if (bufferedData.get(recordName) == null) {
-      bufferedData.put(recordName, new RecordMap());
-    }
-    return newRecord(recordName);
-  }
-    
-  /**
-   * Subclasses should override this if they subclass MetricsRecordImpl.
-   * @param recordName the name of the record
-   * @return newly created instance of MetricsRecordImpl or subclass
-   */
-  protected MetricsRecord newRecord(String recordName) {
-    return new MetricsRecordImpl(recordName, this);
-  }
-    
-  /**
-   * Registers a callback to be called at time intervals determined by
-   * the configuration.
-   *
-   * @param updater object to be run periodically; it should update
-   * some metrics records 
-   */
-  public synchronized void registerUpdater(final Updater updater) {
-    if (!updaters.contains(updater)) {
-      updaters.add(updater);
-    }
-  }
-    
-  /**
-   * Removes a callback, if it exists.
-   *
-   * @param updater object to be removed from the callback list
-   */
-  public synchronized void unregisterUpdater(Updater updater) {
-    updaters.remove(updater);
-  }
-    
-  private synchronized void clearUpdaters() {
-    updaters.clear();
-  }
-    
-  /**
-   * Starts timer if it is not already started
-   */
-  private synchronized void startTimer() {
-    if (timer == null) {
-      timer = new Timer("Timer thread for monitoring " + getContextName(), 
-                        true);
-      TimerTask task = new TimerTask() {
-          public void run() {
-            try {
-              timerEvent();
-            }
-            catch (IOException ioe) {
-              ioe.printStackTrace();
-            }
-          }
-        };
-      long millis = period * 1000;
-      timer.scheduleAtFixedRate(task, millis, millis);
-    }
-  }
-    
-  /**
-   * Stops timer if it is running
-   */
-  private synchronized void stopTimer() {
-    if (timer != null) {
-      timer.cancel();
-      timer = null;
-    }
-  }
-    
-  /**
-   * Timer callback.
-   */
-  private void timerEvent() throws IOException {
-    if (isMonitoring) {
-      Collection<Updater> myUpdaters;
-      synchronized (this) {
-        myUpdaters = new ArrayList<Updater>(updaters);
-      }     
-      // Run all the registered updates without holding a lock
-      // on this context
-      for (Updater updater : myUpdaters) {
-        try {
-          updater.doUpdates(this);
-        }
-        catch (Throwable throwable) {
-          throwable.printStackTrace();
-        }
-      }
-      emitRecords();
-    }
-  }
-    
-  /**
-   *  Emits the records.
-   */
-  private synchronized void emitRecords() throws IOException {
-    for (String recordName : bufferedData.keySet()) {
-      RecordMap recordMap = bufferedData.get(recordName);
-      synchronized (recordMap) {
-        Set<Entry<TagMap, MetricMap>> entrySet = recordMap.entrySet ();
-        for (Entry<TagMap, MetricMap> entry : entrySet) {
-          OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
-          emitRecord(contextName, recordName, outRec);
-        }
-      }
-    }
-    flush();
-  }
-
-  /**
-   * Sends a record to the metrics system.
-   */
-  protected abstract void emitRecord(String contextName, String recordName, 
-                                     OutputRecord outRec) throws IOException;
-    
-  /**
-   * Called each period after all records have been emitted, this method does nothing.
-   * Subclasses may override it in order to perform some kind of flush.
-   */
-  protected void flush() throws IOException {
-  }
-    
-  /**
-   * Called by MetricsRecordImpl.update().  Creates or updates a row in
-   * the internal table of metric data.
-   */
-  protected void update(MetricsRecordImpl record) {
-    String recordName = record.getRecordName();
-    TagMap tagTable = record.getTagTable();
-    Map<String,MetricValue> metricUpdates = record.getMetricTable();
-        
-    RecordMap recordMap = getRecordMap(recordName);
-    synchronized (recordMap) {
-      MetricMap metricMap = recordMap.get(tagTable);
-      if (metricMap == null) {
-        metricMap = new MetricMap();
-        TagMap tagMap = new TagMap(tagTable); // clone tags
-        recordMap.put(tagMap, metricMap);
-      }
-
-      Set<Entry<String, MetricValue>> entrySet = metricUpdates.entrySet();
-      for (Entry<String, MetricValue> entry : entrySet) {
-        String metricName = entry.getKey ();
-        MetricValue updateValue = entry.getValue ();
-        Number updateNumber = updateValue.getNumber();
-        Number currentNumber = metricMap.get(metricName);
-        if (currentNumber == null || updateValue.isAbsolute()) {
-          metricMap.put(metricName, updateNumber);
-        }
-        else {
-          Number newNumber = sum(updateNumber, currentNumber);
-          metricMap.put(metricName, newNumber);
-        }
-      }
-    }
-  }
-    
-  private synchronized RecordMap getRecordMap(String recordName) {
-    return bufferedData.get(recordName);
-  }
-    
-  /**
-   * Adds two numbers, coercing the second to the type of the first.
-   *
-   */
-  private Number sum(Number a, Number b) {
-    if (a instanceof Integer) {
-      return Integer.valueOf(a.intValue() + b.intValue());
-    }
-    else if (a instanceof Float) {
-      return new Float(a.floatValue() + b.floatValue());
-    }
-    else if (a instanceof Short) {
-      return Short.valueOf((short)(a.shortValue() + b.shortValue()));
-    }
-    else if (a instanceof Byte) {
-      return Byte.valueOf((byte)(a.byteValue() + b.byteValue()));
-    }
-    else if (a instanceof Long) {
-      return Long.valueOf((a.longValue() + b.longValue()));
-    }
-    else {
-      // should never happen
-      throw new MetricsException("Invalid number type");
-    }
-            
-  }
-    
-  /**
-   * Called by MetricsRecordImpl.remove().  Removes all matching rows in
-   * the internal table of metric data.  A row matches if it has the same
-   * tag names and values as record, but it may also have additional
-   * tags.
-   */    
-  protected void remove(MetricsRecordImpl record) {
-    String recordName = record.getRecordName();
-    TagMap tagTable = record.getTagTable();
-        
-    RecordMap recordMap = getRecordMap(recordName);
-    synchronized (recordMap) {
-      Iterator<TagMap> it = recordMap.keySet().iterator();
-      while (it.hasNext()) {
-        TagMap rowTags = it.next();
-        if (rowTags.containsAll(tagTable)) {
-          it.remove();
-        }
-      }
-    }
-  }
-    
-  /**
-   * Returns the timer period.
-   */
-  public int getPeriod() {
-    return period;
-  }
-    
-  /**
-   * Sets the timer period
-   */
-  protected void setPeriod(int period) {
-    this.period = period;
-  }
-}
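
For orientation, a minimal sketch of what a concrete context built on the removed AbstractMetricsContext looks like. Everything here is illustrative (the class name StdoutContext is not part of this commit); it only exercises the emitRecord()/flush() contract and the OutputRecord accessors shown in this patch.

package org.apache.hadoop.metrics.spi;

import java.io.IOException;

/** Illustrative only: prints every buffered row once per period. */
public class StdoutContext extends AbstractMetricsContext {

  @Override
  protected void emitRecord(String contextName, String recordName,
                            OutputRecord outRec) throws IOException {
    StringBuilder buf = new StringBuilder(contextName).append('.').append(recordName);
    for (String tag : outRec.getTagNames()) {        // row identity
      buf.append(' ').append(tag).append('=').append(outRec.getTag(tag));
    }
    for (String metric : outRec.getMetricNames()) {  // buffered values
      buf.append(' ').append(metric).append('=').append(outRec.getMetric(metric));
    }
    System.out.println(buf);
  }

  @Override
  protected void flush() throws IOException {
    System.out.flush();   // called once per period, after all records are emitted
  }
}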

+ 0 - 186
src/core/org/apache/hadoop/metrics/spi/CompositeContext.java

@@ -1,186 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.spi;
-
-import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.util.ArrayList;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsException;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-
-public class CompositeContext extends AbstractMetricsContext {
-
-  private static final Log LOG = LogFactory.getLog(CompositeContext.class);
-  private static final String ARITY_LABEL = "arity";
-  private static final String SUB_FMT = "%s.sub%d";
-  private final ArrayList<MetricsContext> subctxt =
-    new ArrayList<MetricsContext>();
-
-  public CompositeContext() {
-  }
-
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-    int nKids;
-    try {
-      String sKids = getAttribute(ARITY_LABEL);
-      nKids = Integer.valueOf(sKids);
-    } catch (Exception e) {
-      LOG.error("Unable to initialize composite metric " + contextName +
-                ": could not init arity", e);
-      return;
-    }
-    for (int i = 0; i < nKids; ++i) {
-      MetricsContext ctxt = MetricsUtil.getContext(
-          String.format(SUB_FMT, contextName, i), contextName);
-      if (null != ctxt) {
-        subctxt.add(ctxt);
-      }
-    }
-  }
-
-  @Override
-  public MetricsRecord newRecord(String recordName) {
-    return (MetricsRecord) Proxy.newProxyInstance(
-        MetricsRecord.class.getClassLoader(),
-        new Class[] { MetricsRecord.class },
-        new MetricsRecordDelegator(recordName, subctxt));
-  }
-
-  @Override
-  protected void emitRecord(String contextName, String recordName,
-      OutputRecord outRec) throws IOException {
-    for (MetricsContext ctxt : subctxt) {
-      try {
-        ((AbstractMetricsContext)ctxt).emitRecord(
-          contextName, recordName, outRec);
-        if (contextName == null || recordName == null || outRec == null) {
-          throw new IOException(contextName + ":" + recordName + ":" + outRec);
-        }
-      } catch (IOException e) {
-        LOG.warn("emitRecord failed: " + ctxt.getContextName(), e);
-      }
-    }
-  }
-
-  @Override
-  protected void flush() throws IOException {
-    for (MetricsContext ctxt : subctxt) {
-      try {
-        ((AbstractMetricsContext)ctxt).flush();
-      } catch (IOException e) {
-        LOG.warn("flush failed: " + ctxt.getContextName(), e);
-      }
-    }
-  }
-
-  @Override
-  public void startMonitoring() throws IOException {
-    for (MetricsContext ctxt : subctxt) {
-      try {
-        ctxt.startMonitoring();
-      } catch (IOException e) {
-        LOG.warn("startMonitoring failed: " + ctxt.getContextName(), e);
-      }
-    }
-  }
-
-  @Override
-  public void stopMonitoring() {
-    for (MetricsContext ctxt : subctxt) {
-      ctxt.stopMonitoring();
-    }
-  }
-
-  /**
-   * Return true if all subcontexts are monitoring.
-   */
-  @Override
-  public boolean isMonitoring() {
-    boolean ret = true;
-    for (MetricsContext ctxt : subctxt) {
-      ret &= ctxt.isMonitoring();
-    }
-    return ret;
-  }
-
-  @Override
-  public void close() {
-    for (MetricsContext ctxt : subctxt) {
-      ctxt.close();
-    }
-  }
-
-  @Override
-  public void registerUpdater(Updater updater) {
-    for (MetricsContext ctxt : subctxt) {
-      ctxt.registerUpdater(updater);
-    }
-  }
-
-  @Override
-  public void unregisterUpdater(Updater updater) {
-    for (MetricsContext ctxt : subctxt) {
-      ctxt.unregisterUpdater(updater);
-    }
-  }
-
-  private static class MetricsRecordDelegator implements InvocationHandler {
-    private static final Method m_getRecordName = initMethod();
-    private static Method initMethod() {
-      try {
-        return MetricsRecord.class.getMethod("getRecordName", new Class[0]);
-      } catch (Exception e) {
-        throw new RuntimeException("Internal error", e);
-      }
-    }
-
-    private final String recordName;
-    private final ArrayList<MetricsRecord> subrecs;
-
-    MetricsRecordDelegator(String recordName, ArrayList<MetricsContext> ctxts) {
-      this.recordName = recordName;
-      this.subrecs = new ArrayList<MetricsRecord>(ctxts.size());
-      for (MetricsContext ctxt : ctxts) {
-        subrecs.add(ctxt.createRecord(recordName));
-      }
-    }
-
-    public Object invoke(Object p, Method m, Object[] args) throws Throwable {
-      if (m_getRecordName.equals(m)) {
-        return recordName;
-      }
-      assert Void.TYPE.equals(m.getReturnType());
-      for (MetricsRecord rec : subrecs) {
-        m.invoke(rec, args);
-      }
-      return null;
-    }
-  }
-
-}
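
CompositeContext is driven entirely by configuration. A hadoop-metrics.properties sketch, assuming the usual contextName.attribute key convention; the sub-context names (sub0, sub1, ...) follow from SUB_FMT and the init() loop above, and "dfs" is only an example context name:

dfs.class=org.apache.hadoop.metrics.spi.CompositeContext
dfs.arity=2
dfs.sub0.class=org.apache.hadoop.metrics.file.FileContext
dfs.sub1.class=org.apache.hadoop.metrics.ganglia.GangliaContext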

+ 0 - 275
src/core/org/apache/hadoop/metrics/spi/MetricsRecordImpl.java

@@ -1,275 +0,0 @@
-/*
- * MetricsRecordImpl.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.spi;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
-
-/**
- * An implementation of MetricsRecord.  Keeps a back-pointer to the context
- * from which it was created, and delegates back to it on <code>update</code>
- * and <code>remove()</code>.
- */
-public class MetricsRecordImpl implements MetricsRecord {
-    
-  private TagMap tagTable = new TagMap();
-  private Map<String,MetricValue> metricTable = new LinkedHashMap<String,MetricValue>();
-    
-  private String recordName;
-  private AbstractMetricsContext context;
-    
-    
-  /** Creates a new instance of FileRecord */
-  protected MetricsRecordImpl(String recordName, AbstractMetricsContext context)
-  {
-    this.recordName = recordName;
-    this.context = context;
-  }
-    
-  /**
-   * Returns the record name. 
-   *
-   * @return the record name
-   */
-  public String getRecordName() {
-    return recordName;
-  }
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public void setTag(String tagName, String tagValue) {
-    if (tagValue == null) {
-      tagValue = "";
-    }
-    tagTable.put(tagName, tagValue);
-  }
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public void setTag(String tagName, int tagValue) {
-    tagTable.put(tagName, Integer.valueOf(tagValue));
-  }
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public void setTag(String tagName, long tagValue) {
-    tagTable.put(tagName, Long.valueOf(tagValue));
-  }
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public void setTag(String tagName, short tagValue) {
-    tagTable.put(tagName, Short.valueOf(tagValue));
-  }
-    
-  /**
-   * Sets the named tag to the specified value.
-   *
-   * @param tagName name of the tag
-   * @param tagValue new value of the tag
-   * @throws MetricsException if the tagName conflicts with the configuration
-   */
-  public void setTag(String tagName, byte tagValue) {
-    tagTable.put(tagName, Byte.valueOf(tagValue));
-  }
-    
-  /**
-   * Removes any tag of the specified name.
-   */
-  public void removeTag(String tagName) {
-    tagTable.remove(tagName);
-  }
-  
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void setMetric(String metricName, int metricValue) {
-    setAbsolute(metricName, Integer.valueOf(metricValue));
-  }
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void setMetric(String metricName, long metricValue) {
-    setAbsolute(metricName, Long.valueOf(metricValue));
-  }
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void setMetric(String metricName, short metricValue) {
-    setAbsolute(metricName, Short.valueOf(metricValue));
-  }
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void setMetric(String metricName, byte metricValue) {
-    setAbsolute(metricName, Byte.valueOf(metricValue));
-  }
-    
-  /**
-   * Sets the named metric to the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue new value of the metric
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void setMetric(String metricName, float metricValue) {
-    setAbsolute(metricName, new Float(metricValue));
-  }
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void incrMetric(String metricName, int metricValue) {
-    setIncrement(metricName, Integer.valueOf(metricValue));
-  }
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void incrMetric(String metricName, long metricValue) {
-    setIncrement(metricName, Long.valueOf(metricValue));
-  }
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void incrMetric(String metricName, short metricValue) {
-    setIncrement(metricName, Short.valueOf(metricValue));
-  }
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void incrMetric(String metricName, byte metricValue) {
-    setIncrement(metricName, Byte.valueOf(metricValue));
-  }
-    
-  /**
-   * Increments the named metric by the specified value.
-   *
-   * @param metricName name of the metric
-   * @param metricValue incremental value
-   * @throws MetricsException if the metricName or the type of the metricValue 
-   * conflicts with the configuration
-   */
-  public void incrMetric(String metricName, float metricValue) {
-    setIncrement(metricName, new Float(metricValue));
-  }
-    
-  private void setAbsolute(String metricName, Number metricValue) {
-    metricTable.put(metricName, new MetricValue(metricValue, MetricValue.ABSOLUTE));
-  }
-    
-  private void setIncrement(String metricName, Number metricValue) {
-    metricTable.put(metricName, new MetricValue(metricValue, MetricValue.INCREMENT));
-  }
-    
-  /**
-   * Updates the table of buffered data which is to be sent periodically.
-   * If the tag values match an existing row, that row is updated; 
-   * otherwise, a new row is added.
-   */
-  public void update() {
-    context.update(this);
-  }
-    
-  /**
-   * Removes the row, if it exists, in the buffered data table having tags 
-   * that equal the tags that have been set on this record. 
-   */
-  public void remove() {
-    context.remove(this);
-  }
-
-  TagMap getTagTable() {
-    return tagTable;
-  }
-
-  Map<String, MetricValue> getMetricTable() {
-    return metricTable;
-  }
-}
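
A short sketch of the caller-side flow this class implements (context, record and metric names are illustrative; getContext() and createRecord() are the MetricsUtil helpers removed elsewhere in this commit): tags identify the buffered row, incrMetric() values are summed into it, setMetric() values replace it, and update()/remove() delegate back to the context as described above.

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;

public class RecordUsageSketch {
  public static void main(String[] args) {
    MetricsContext context = MetricsUtil.getContext("foo");          // example context name
    MetricsRecord record = MetricsUtil.createRecord(context, "fooRecord");

    record.setTag("hostName", "node-1");   // identifies the row in the buffer
    record.incrMetric("requests", 5);      // INCREMENT: summed into the buffered row
    record.setMetric("queueLength", 7);    // ABSOLUTE: replaces the buffered value
    record.update();                       // folds the values into the context buffer

    record.remove();                       // drops buffered rows matching these tags
  }
}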

+ 0 - 83
src/core/org/apache/hadoop/metrics/spi/NullContextWithUpdateThread.java

@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.spi;
-
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsException;
-
-/**
- * A null context which keeps the timer thread running, calling the registered
- * updaters periodically once monitoring is started, so the data is still
- * sampled correctly.
- * In all other respects this is like the null context: no data is emitted.
- * This is suitable for monitoring systems such as JMX, which read the
- * metrics on demand.
- * 
- * The default implementations of startMonitoring and stopMonitoring in
- * AbstractMetricsContext are good enough.
- * 
- */
-
-public class NullContextWithUpdateThread extends AbstractMetricsContext {
-  
-  private static final String PERIOD_PROPERTY = "period";
-    
-  /** Creates a new instance of NullContextWithUpdateThread */
-  public NullContextWithUpdateThread() {
-  }
-  
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-    
-    // If period is specified, use it, otherwise the default is good enough
-        
-    String periodStr = getAttribute(PERIOD_PROPERTY);
-    if (periodStr != null) {
-      int period = 0;
-      try {
-        period = Integer.parseInt(periodStr);
-      } catch (NumberFormatException nfe) {
-      }
-      if (period <= 0) {
-        throw new MetricsException("Invalid period: " + periodStr);
-      }
-      setPeriod(period);
-    }
-  }
-   
-    
-  /**
-   * Do-nothing version of emitRecord
-   */
-  protected void emitRecord(String contextName, String recordName,
-                            OutputRecord outRec) 
-  {}
-    
-  /**
-   * Do-nothing version of update
-   */
-  protected void update(MetricsRecordImpl record) {
-  }
-    
-  /**
-   * Do-nothing version of remove
-   */
-  protected void remove(MetricsRecordImpl record) {
-  }
-}
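
A hadoop-metrics.properties sketch of the intended use (the "dfs" context name is only an example, and the keys assume the standard contextName.attribute convention): updaters keep running every "period" seconds so JMX sees fresh values, but nothing is emitted.

dfs.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
dfs.period=10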

+ 0 - 72
src/core/org/apache/hadoop/metrics/spi/OutputRecord.java

@@ -1,72 +0,0 @@
-/*
- * OutputRecord.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.spi;
-
-import java.util.Collections;
-import java.util.Set;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
-import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
-
-/**
- * Represents a record of metric data to be sent to a metrics system.
- */
-public class OutputRecord {
-    
-  private TagMap tagMap;
-  private MetricMap metricMap;
-    
-  /** Creates a new instance of OutputRecord */
-  OutputRecord(TagMap tagMap, MetricMap metricMap) {
-    this.tagMap = tagMap;
-    this.metricMap = metricMap;
-  }
-    
-  /**
-   * Returns the set of tag names
-   */
-  public Set<String> getTagNames() {
-    return Collections.unmodifiableSet(tagMap.keySet());
-  }
-    
-  /**
-   * Returns a tag object, which can be a String, Integer, Short or Byte.
-   *
-   * @return the tag value, or null if there is no such tag
-   */
-  public Object getTag(String name) {
-    return tagMap.get(name);
-  }
-    
-  /**
-   * Returns the set of metric names.
-   */
-  public Set<String> getMetricNames() {
-    return Collections.unmodifiableSet(metricMap.keySet());
-  }
-    
-  /**
-   * Returns the metric object which can be a Float, Integer, Short or Byte.
-   */
-  public Number getMetric(String name) {
-    return metricMap.get(name);
-  }
-    
-}

+ 0 - 67
src/core/org/apache/hadoop/metrics/spi/Util.java

@@ -1,67 +0,0 @@
-/*
- * Util.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.hadoop.metrics.spi;
-
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Static utility methods
- */
-public class Util {
-    
-  /**
-   * This class is not intended to be instantiated
-   */
-  private Util() {}
-    
-  /**
-   * Parses a space and/or comma separated sequence of server specifications
-   * of the form <i>hostname</i> or <i>hostname:port</i>.  If 
-   * the specs string is null, defaults to localhost:defaultPort.
-   * 
-   * @return a list of InetSocketAddress objects.
-   */
-  public static List<InetSocketAddress> parse(String specs, int defaultPort) {
-    List<InetSocketAddress> result = new ArrayList<InetSocketAddress>(1);
-    if (specs == null) {
-      result.add(new InetSocketAddress("localhost", defaultPort));
-    }
-    else {
-      String[] specStrings = specs.split("[ ,]+");
-      for (String specString : specStrings) {
-        int colon = specString.indexOf(':');
-        if (colon < 0 || colon == specString.length() - 1) {
-          result.add(new InetSocketAddress(specString, defaultPort));
-        } else {
-          String hostname = specString.substring(0, colon);
-          int port = Integer.parseInt(specString.substring(colon+1));
-          result.add(new InetSocketAddress(hostname, port));
-        }
-      }
-    }
-    return result;
-  }
-    
-}
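
A quick illustration of Util.parse() (host names are made up): entries without an explicit port fall back to the default.

import java.net.InetSocketAddress;
import java.util.List;
import org.apache.hadoop.metrics.spi.Util;

public class ParseSketch {
  public static void main(String[] args) {
    // "host1" takes the default port 8649; "host2:8650" keeps its explicit port.
    List<InetSocketAddress> servers = Util.parse("host1, host2:8650", 8649);
    for (InetSocketAddress addr : servers) {
      System.out.println(addr.getHostName() + ":" + addr.getPort());
    }
  }
}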

+ 0 - 36
src/core/org/apache/hadoop/metrics/spi/package.html

@@ -1,36 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-  <head>
-    <title>org.apache.hadoop.metrics.spi</title>
-  </head>
-  <body>
-The Service Provider Interface for the Metrics API.  This package provides
-an interface allowing a variety of metrics reporting implementations to be
-plugged in to the Metrics API.  Examples of such implementations can be found 
-in the packages <code>org.apache.hadoop.metrics.file</code> and
-<code>org.apache.hadoop.metrics.ganglia</code>.<p/>
-
-Plugging in an implementation involves writing a concrete subclass of 
-<code>AbstractMetricsContext</code>.  The subclass should get its
- configuration information using the <code>getAttribute(<i>attributeName</i>)</code>
- method.
-  </body>
-</html>

+ 0 - 226
src/core/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java

@@ -1,226 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.management.Attribute;
-import javax.management.AttributeList;
-import javax.management.AttributeNotFoundException;
-import javax.management.DynamicMBean;
-import javax.management.InvalidAttributeValueException;
-import javax.management.MBeanAttributeInfo;
-import javax.management.MBeanException;
-import javax.management.MBeanInfo;
-import javax.management.MBeanOperationInfo;
-import javax.management.ReflectionException;
-
-import org.apache.hadoop.metrics.MetricsUtil;
-
-
-
-/**
- * This abstract base class facilitates creating dynamic mbeans automatically from
- * metrics. 
- * The metrics constructors register the metrics in a registry. 
- * Different categories of metrics should be in different classes with their own
- * registry (as in NameNodeMetrics and DataNodeMetrics).
- * Then the MBean can be created by passing the registry to the constructor.
- * The MBean should then be registered under an MBean name (example):
- *  MetricsHolder myMetrics = new MetricsHolder(); // has metrics and registry
- *  MetricsTestMBean theMBean = new MetricsTestMBean(myMetrics.mregistry);
- *  ObjectName mbeanName = MBeanUtil.registerMBean("ServiceFoo",
- *                "TestStatistics", theMBean);
- * 
- *
- */
-public abstract class MetricsDynamicMBeanBase implements DynamicMBean {
-  private final static String AVG_TIME = "AvgTime";
-  private final static String MIN_TIME = "MinTime";
-  private final static String MAX_TIME = "MaxTime";
-  private final static String NUM_OPS = "NumOps";
-  private final static String RESET_ALL_MIN_MAX_OP = "resetAllMinMax";
-  private MetricsRegistry metricsRegistry;
-  private MBeanInfo mbeanInfo;
-  private Map<String, MetricsBase> metricsRateAttributeMod;
-  private int numEntriesInRegistry = 0;
-  private String mbeanDescription;
-  
-  protected MetricsDynamicMBeanBase(final MetricsRegistry mr, final String aMBeanDescription) {
-    metricsRegistry = mr;
-    mbeanDescription = aMBeanDescription;
-    createMBeanInfo();
-  }
-  
-  private void updateMbeanInfoIfMetricsListChanged()  {
-    if (numEntriesInRegistry != metricsRegistry.size())
-      createMBeanInfo();
-  }
-  
-  private void createMBeanInfo() {
-    metricsRateAttributeMod = new HashMap<String, MetricsBase>();
-    boolean needsMinMaxResetOperation = false;
-    List<MBeanAttributeInfo> attributesInfo = new ArrayList<MBeanAttributeInfo>();
-    MBeanOperationInfo[] operationsInfo = null;
-    numEntriesInRegistry = metricsRegistry.size();
-    
-    for (MetricsBase o : metricsRegistry.getMetricsList()) {
-
-      if (MetricsTimeVaryingRate.class.isInstance(o)) {
-        // For each of the metrics there are 3 different attributes
-        attributesInfo.add(new MBeanAttributeInfo(o.getName() + NUM_OPS, "java.lang.Integer",
-            o.getDescription(), true, false, false));
-        attributesInfo.add(new MBeanAttributeInfo(o.getName() + AVG_TIME, "java.lang.Long",
-            o.getDescription(), true, false, false));
-        attributesInfo.add(new MBeanAttributeInfo(o.getName() + MIN_TIME, "java.lang.Long",
-            o.getDescription(), true, false, false));
-        attributesInfo.add(new MBeanAttributeInfo(o.getName() + MAX_TIME, "java.lang.Long",
-            o.getDescription(), true, false, false));
-        needsMinMaxResetOperation = true;  // the min and max can be reset.
-        
-        // Note the special attributes (AVG_TIME, MIN_TIME, ..) are derived from metrics 
-        // Rather than check for the suffix we store them in a map.
-        metricsRateAttributeMod.put(o.getName() + NUM_OPS, o);
-        metricsRateAttributeMod.put(o.getName() + AVG_TIME, o);
-        metricsRateAttributeMod.put(o.getName() + MIN_TIME, o);
-        metricsRateAttributeMod.put(o.getName() + MAX_TIME, o);
-        
-      }  else if ( MetricsIntValue.class.isInstance(o) || MetricsTimeVaryingInt.class.isInstance(o) ) {
-        attributesInfo.add(new MBeanAttributeInfo(o.getName(), "java.lang.Integer",
-            o.getDescription(), true, false, false)); 
-      } else if ( MetricsLongValue.class.isInstance(o) || MetricsTimeVaryingLong.class.isInstance(o) ) {
-        attributesInfo.add(new MBeanAttributeInfo(o.getName(), "java.lang.Long",
-            o.getDescription(), true, false, false));     
-      } else {
-        MetricsUtil.LOG.error("unknown metrics type: " + o.getClass().getName());
-      }
-
-      if (needsMinMaxResetOperation) {
-        operationsInfo = new MBeanOperationInfo[] {
-            new MBeanOperationInfo(RESET_ALL_MIN_MAX_OP, "Reset (zero) All Min Max",
-                    null, "void", MBeanOperationInfo.ACTION) };
-      }
-    }
-    MBeanAttributeInfo[] attrArray = new MBeanAttributeInfo[attributesInfo.size()];
-    mbeanInfo =  new MBeanInfo(this.getClass().getName(), mbeanDescription, 
-        attributesInfo.toArray(attrArray), null, operationsInfo, null);
-  }
-  
-  @Override
-  public Object getAttribute(String attributeName) throws AttributeNotFoundException,
-      MBeanException, ReflectionException {
-    if (attributeName == null || attributeName.equals("")) 
-      throw new IllegalArgumentException();
-    
-    updateMbeanInfoIfMetricsListChanged();
-    
-    Object o = metricsRateAttributeMod.get(attributeName);
-    if (o == null) {
-      o = metricsRegistry.get(attributeName);
-    }
-    if (o == null)
-      throw new AttributeNotFoundException();
-    
-    if (o instanceof MetricsIntValue)
-      return ((MetricsIntValue) o).get();
-    else if (o instanceof MetricsLongValue)
-      return ((MetricsLongValue) o).get();
-    else if (o instanceof MetricsTimeVaryingInt)
-      return ((MetricsTimeVaryingInt) o).getPreviousIntervalValue();
-    else if (o instanceof MetricsTimeVaryingLong)
-      return ((MetricsTimeVaryingLong) o).getPreviousIntervalValue();
-    else if (o instanceof MetricsTimeVaryingRate) {
-      MetricsTimeVaryingRate or = (MetricsTimeVaryingRate) o;
-      if (attributeName.endsWith(NUM_OPS))
-        return or.getPreviousIntervalNumOps();
-      else if (attributeName.endsWith(AVG_TIME))
-        return or.getPreviousIntervalAverageTime();
-      else if (attributeName.endsWith(MIN_TIME))
-        return or.getMinTime();
-      else if (attributeName.endsWith(MAX_TIME))
-        return or.getMaxTime();
-      else {
-        MetricsUtil.LOG.error("Unexpected attrubute suffix");
-        throw new AttributeNotFoundException();
-      }
-    } else {
-        MetricsUtil.LOG.error("unknown metrics type: " + o.getClass().getName());
-        throw new AttributeNotFoundException();
-    }
-  }
-
-  @Override
-  public AttributeList getAttributes(String[] attributeNames) {
-    if (attributeNames == null || attributeNames.length == 0) 
-      throw new IllegalArgumentException();
-    
-    updateMbeanInfoIfMetricsListChanged();
-    
-    AttributeList result = new AttributeList(attributeNames.length);
-    for (String iAttributeName : attributeNames) {
-      try {
-        Object value = getAttribute(iAttributeName);
-        result.add(new Attribute(iAttributeName, value));
-      } catch (Exception e) {
-        continue;
-      } 
-    }
-    return result;
-  }
-
-  @Override
-  public MBeanInfo getMBeanInfo() {
-    return mbeanInfo;
-  }
-
-  @Override
-  public Object invoke(String actionName, Object[] parms, String[] signature)
-      throws MBeanException, ReflectionException {
-    
-    if (actionName == null || actionName.equals("")) 
-      throw new IllegalArgumentException();
-    
-    
-    // Right now we support only one fixed operation (if it applies)
-    if (!(actionName.equals(RESET_ALL_MIN_MAX_OP)) || 
-        mbeanInfo.getOperations().length != 1) {
-      throw new ReflectionException(new NoSuchMethodException(actionName));
-    }
-    for (MetricsBase m : metricsRegistry.getMetricsList())  {
-      if ( MetricsTimeVaryingRate.class.isInstance(m) ) {
-        MetricsTimeVaryingRate.class.cast(m).resetMinMax();
-      }
-    }
-    return null;
-  }
-
-  @Override
-  public void setAttribute(Attribute attribute)
-      throws AttributeNotFoundException, InvalidAttributeValueException,
-      MBeanException, ReflectionException {
-    throw new ReflectionException(new NoSuchMethodException("set" + attribute));
-  }
-
-  @Override
-  public AttributeList setAttributes(AttributeList attributes) {
-    return null;
-  }
-}
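
The javadoc example above, spelled out as a hypothetical holding MBean (FooActivityMBean and the service/name strings are made up; MBeanUtil refers to the existing registration helper in this package):

package org.apache.hadoop.metrics.util;

import javax.management.ObjectName;

/** Hypothetical MBean exposing the metrics held in a MetricsRegistry. */
public class FooActivityMBean extends MetricsDynamicMBeanBase {
  private final ObjectName mbeanName;

  public FooActivityMBean(MetricsRegistry registry) {
    super(registry, "Activity statistics for Foo");
    mbeanName = MBeanUtil.registerMBean("FooService", "FooActivity", this);
  }

  public void shutdown() {
    if (mbeanName != null) {
      MBeanUtil.unregisterMBean(mbeanName);
    }
  }
}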

+ 0 - 104
src/core/org/apache/hadoop/metrics/util/MetricsIntValue.java

@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.util.StringUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsIntValue class is for a metric that is not time-varying
- * but changes only when it is set. 
- * Each time its value is set, it is published only *once* at the next update
- * call.
- *
- */
-public class MetricsIntValue extends MetricsBase {  
-
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
-
-  private int value;
-  private boolean changed;
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsIntValue(final String nam, final MetricsRegistry registry, final String description) {
-    super(nam, description);
-    value = 0;
-    changed = false;
-    registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsIntValue(final String nam, MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-  }
-  
-  
-  
-  /**
-   * Set the value
-   * @param newValue
-   */
-  public synchronized void set(final int newValue) {
-    value = newValue;
-    changed = true;
-  }
-  
-  /**
-   * Get value
-   * @return the value last set
-   */
-  public synchronized int get() { 
-    return value;
-  } 
-  
-
-  /**
-   * Push the metric to the mr.
-   * The metric is pushed only if it was updated since last push
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #get()}
-   *
-   * @param mr
-   */
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    if (changed) {
-      try {
-        mr.setMetric(getName(), value);
-      } catch (Exception e) {
-        LOG.info("pushMetric failed for " + getName() + "\n" +
-            StringUtils.stringifyException(e));
-      }
-    }
-    changed = false;
-  }
-}

+ 0 - 88
src/core/org/apache/hadoop/metrics/util/MetricsLongValue.java

@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.metrics.MetricsRecord;
-
-
-/**
- * The MetricsLongValue class is for a metric that is not time-varying
- * but changes only when it is set. 
- * Each time its value is set, it is published only *once* at the next update
- * call.
- *
- */
-public class MetricsLongValue extends MetricsBase{  
-  private long value;
-  private boolean changed;
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsLongValue(final String nam, final MetricsRegistry registry, final String description) {
-    super(nam, description);
-    value = 0;
-    changed = false;
-    registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsLongValue(final String nam, MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-  }
-  
-  /**
-   * Set the value
-   * @param newValue
-   */
-  public synchronized void set(final long newValue) {
-    value = newValue;
-    changed = true;
-  }
-  
-  /**
-   * Get value
-   * @return the value last set
-   */
-  public synchronized long get() { 
-    return value;
-  } 
- 
-
-  /**
-   * Push the metric to the mr.
-   * The metric is pushed only if it was updated since last push
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #get()}
-   *
-   * @param mr
-   */
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    if (changed) 
-      mr.setMetric(getName(), value);
-    changed = false;
-  }
-}

+ 0 - 85
src/core/org/apache/hadoop/metrics/util/MetricsRegistry.java

@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * 
- * This is the registry for metrics.
- * A related set of metrics should be declared in a holding class and registered
- * in a registry for those metrics, which is also stored in the holding class.
- *
- */
-public class MetricsRegistry {
-  private Map<String, MetricsBase> metricsList = new HashMap<String, MetricsBase>();
-
-  public MetricsRegistry() {
-  }
-  
-  /**
-   * 
-   * @return number of metrics in the registry
-   */
-  public int size() {
-    return metricsList.size();
-  }
-  
-  /**
-   * Add a new metrics to the registry
-   * @param metricsName - the name
-   * @param theMetricsObj - the metrics
-   * @throws IllegalArgumentException if a name is already registered
-   */
-  public synchronized void add(final String metricsName, final MetricsBase theMetricsObj) {
-    if (metricsList.containsKey(metricsName)) {
-      throw new IllegalArgumentException("Duplicate metricsName:" + metricsName);
-    }
-    metricsList.put(metricsName, theMetricsObj);
-  }
-
-  
-  /**
-   * 
-   * @param metricsName
-   * @return the metrics if there is one registered by the supplied name.
-   *         Returns null if none is registered
-   */
-  public synchronized MetricsBase get(final String metricsName) {
-    return metricsList.get(metricsName);
-  }
-  
-  
-  /**
-   * 
-   * @return the list of metrics names
-   */
-  public synchronized Collection<String> getKeyList() {
-    return metricsList.keySet();
-  }
-  
-  /**
-   * 
-   * @return the list of metrics
-   */
-  public synchronized Collection<MetricsBase> getMetricsList() {
-    return metricsList.values();
-  }
-}
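
Putting the registry together with the value and time-varying metric classes in this package, a sketch of the holding-class pattern the javadoc describes (FooMetrics and the metric names are illustrative; Updater and the MetricsUtil helpers are the existing interfaces in org.apache.hadoop.metrics):

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsIntValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;

public class FooMetrics implements Updater {
  public final MetricsRegistry registry = new MetricsRegistry();
  public final MetricsTimeVaryingInt requests =
      new MetricsTimeVaryingInt("requests", registry, "Requests in the last interval");
  public final MetricsIntValue queueLength =
      new MetricsIntValue("queueLength", registry, "Current queue length");

  private final MetricsRecord metricsRecord;

  public FooMetrics(String sessionId) {
    MetricsContext context = MetricsUtil.getContext("foo");   // example context name
    metricsRecord = MetricsUtil.createRecord(context, "foo");
    metricsRecord.setTag("sessionId", sessionId);
    context.registerUpdater(this);          // doUpdates() then runs once per period
  }

  /** Called by the metrics framework; pushes every registered metric, then updates. */
  public void doUpdates(MetricsContext unused) {
    synchronized (this) {
      for (MetricsBase m : registry.getMetricsList()) {
        m.pushMetric(metricsRecord);
      }
    }
    metricsRecord.update();
  }
}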

+ 0 - 128
src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java

@@ -1,128 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.util.StringUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsTimeVaryingInt class is for a metric that naturally
- * varies over time (e.g. number of files created). The metric is accumulated
- * over an interval (set in the metrics config file); it is published at the
- * end of each interval and then reset to zero, so the counter holds the value
- * for the current interval. 
- * 
- * Note if one wants a time associated with the metric then use
- * @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
- *
- */
-public class MetricsTimeVaryingInt extends MetricsBase {
-
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
-  
-  private int currentValue;
-  private int previousIntervalValue;
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * @param description - the description
-   */
-  public MetricsTimeVaryingInt(final String nam,
-                               final MetricsRegistry registry,
-                               final String description) {
-    super(nam, description);
-    currentValue = 0;
-    previousIntervalValue = 0;
-    registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsTimeVaryingInt(final String nam, final MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-  }
-  
-
-  
-  /**
-   * Inc metrics for incr value
-   * @param incr - number of operations
-   */
-  public synchronized void inc(final int incr) {
-    currentValue += incr;
-  }
-  
-  /**
-   * Inc metrics by one
-   */
-  public synchronized void inc() {
-    currentValue++;
-  }
-
-  private synchronized void intervalHeartBeat() {
-     previousIntervalValue = currentValue;
-     currentValue = 0;
-  }
-  
-  /**
-   * Push the delta  metrics to the mr.
-   * The delta is since the last push/interval.
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #previousIntervalValue}
-   *
-   * @param mr
-   */
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    intervalHeartBeat();
-    try {
-      mr.incrMetric(getName(), getPreviousIntervalValue());
-    } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
-    }
-  }
-  
-  
-  /**
-   * The Value at the Previous interval
-   * @return prev interval value
-   */
-  public synchronized int getPreviousIntervalValue() { 
-    return previousIntervalValue;
-  }
-  
-  /**
-   * The Value at the current interval
-   * @return current interval value
-   */
-  public synchronized int getCurrentIntervalValue() { 
-    return currentValue;
-  } 
-}

+ 0 - 124
src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java

@@ -1,124 +0,0 @@
-package org.apache.hadoop.metrics.util;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.util.StringUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsTimeVaryingLong class is for a metric that naturally
- * varies over time (e.g. number of files created). The metric is accumulated
- * over an interval (set in the metrics config file); it is published at the
- * end of each interval and then reset to zero, so the counter holds the value
- * for the current interval. 
- * 
- * Note if one wants a time associated with the metric then use
- * @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
- *
- */
-public class MetricsTimeVaryingLong extends MetricsBase{
-
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
- 
-  private long currentValue;
-  private long previousIntervalValue;
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsTimeVaryingLong(final String nam, MetricsRegistry registry, final String description) {
-    super(nam, description);
-    currentValue = 0;
-    previousIntervalValue = 0;
-    registry.add(nam, this);
-  }
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsTimeVaryingLong(final String nam, MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-  }
-  
-  /**
-   * Inc metrics for incr value
-   * @param incr - number of operations
-   */
-  public synchronized void inc(final long incr) {
-    currentValue += incr;
-  }
-  
-  /**
-   * Inc metrics by one
-   */
-  public synchronized void inc() {
-    currentValue++;
-  }
-
-  private synchronized void intervalHeartBeat() {
-     previousIntervalValue = currentValue;
-     currentValue = 0;
-  }
-  
-  /**
-   * Push the delta  metrics to the mr.
-   * The delta is since the last push/interval.
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #previousIntervalValue}
-   *
-   * @param mr
-   */
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    intervalHeartBeat();
-    try {
-      mr.incrMetric(getName(), getPreviousIntervalValue());
-    } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
-    }
-  }
-  
-  
-  /**
-   * The Value at the Previous interval
-   * @return prev interval value
-   */
-  public synchronized long getPreviousIntervalValue() { 
-    return previousIntervalValue;
-  } 
-  
-  /**
-   * The Value at the current interval
-   * @return current interval value
-   */
-  public synchronized long getCurrentIntervalValue() { 
-    return currentValue;
-  } 
-}

+ 0 - 196
src/core/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java

@@ -1,196 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.util.StringUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsTimeVaryingRate class is for a rate based metric that
- * naturally varies over time (e.g. time taken to create a file).
- * The rate is averaged at each interval heart beat (the interval
- * is set in the metrics config file).
- * This class also keeps track of the min and max rates along with 
- * a method to reset the min-max.
- *
- */
-public class MetricsTimeVaryingRate extends MetricsBase {
-
-  private static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.metrics.util");
-
-  static class Metrics {
-    int numOperations = 0;
-    long time = 0;  // total time or average time
-
-    void set(final Metrics resetTo) {
-      numOperations = resetTo.numOperations;
-      time = resetTo.time;
-    }
-    
-    void reset() {
-      numOperations = 0;
-      time = 0;
-    }
-  }
-  
-  static class MinMax {
-    long minTime = -1;
-    long maxTime = 0;
-    
-    void set(final MinMax newVal) {
-      minTime = newVal.minTime;
-      maxTime = newVal.maxTime;
-    }
-    
-    void reset() {
-      minTime = -1;
-      maxTime = 0;
-    }
-    void update(final long time) { // update min max
-      minTime = (minTime == -1) ? time : Math.min(minTime, time);
-      minTime = Math.min(minTime, time);
-      maxTime = Math.max(maxTime, time);
-    }
-  }
-  private Metrics currentData;
-  private Metrics previousIntervalData;
-  private MinMax minMax;
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsTimeVaryingRate(final String nam, final MetricsRegistry registry, final String description) {
-    super(nam, description);
-    currentData = new Metrics();
-    previousIntervalData = new Metrics();
-    minMax = new MinMax();
-    registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsTimeVaryingRate(final String nam, MetricsRegistry registry) {
-    this(nam, registry, NO_DESCRIPTION);
-
-  }
-  
-  
-  /**
-   * Increment the metrics for numOps operations
-   * @param numOps - number of operations
-   * @param time - time for numOps operations
-   */
-  public synchronized void inc(final int numOps, final long time) {
-    currentData.numOperations += numOps;
-    currentData.time += time;
-    long timePerOps = time/numOps;
-    minMax.update(timePerOps);
-  }
-  
-  /**
-   * Increment the metrics for one operation
-   * @param time for one operation
-   */
-  public synchronized void inc(final long time) {
-    currentData.numOperations++;
-    currentData.time += time;
-    minMax.update(time);
-  }
-  
-  
-
-  private synchronized void intervalHeartBeat() {
-     previousIntervalData.numOperations = currentData.numOperations;
-     previousIntervalData.time = (currentData.numOperations == 0) ?
-                             0 : currentData.time / currentData.numOperations;
-     currentData.reset();
-  }
-  
-  /**
-   * Push the delta  metrics to the mr.
-   * The delta is since the last push/interval.
-   * 
-   * Note this does NOT push to JMX
-   * (JMX gets the info via {@link #getPreviousIntervalAverageTime()} and
-   * {@link #getPreviousIntervalNumOps()}
-   *
-   * @param mr
-   */
-  public synchronized void pushMetric(final MetricsRecord mr) {
-    intervalHeartBeat();
-    try {
-      mr.incrMetric(getName() + "_num_ops", getPreviousIntervalNumOps());
-      mr.setMetric(getName() + "_avg_time", getPreviousIntervalAverageTime());
-    } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
-    }
-  }
-  
-  /**
-   * The number of operations in the previous interval
-   * @return - ops in prev interval
-   */
-  public synchronized int getPreviousIntervalNumOps() { 
-    return previousIntervalData.numOperations;
-  }
-  
-  /**
-   * The average time of an operation in the previous interval
-   * @return - the average time.
-   */
-  public synchronized long getPreviousIntervalAverageTime() {
-    return previousIntervalData.time;
-  } 
-  
-  /**
-   * The min time for a single operation since the last reset
-   *  {@link #resetMinMax()}
-   * @return min time for an operation
-   */
-  public synchronized long getMinTime() {
-    return  minMax.minTime;
-  }
-  
-  /**
-   * The max time for a single operation since the last reset
-   *  {@link #resetMinMax()}
-   * @return max time for an operation
-   */
-  public synchronized long getMaxTime() {
-    return minMax.maxTime;
-  }
-  
-  /**
-   * Reset the min max values
-   */
-  public synchronized void resetMinMax() {
-    minMax.reset();
-  }
-}

+ 108 - 0
src/core/org/apache/hadoop/metrics2/Metric.java

@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The immutable metric
+ */
+public abstract class Metric {
+
+  public static final String NO_DESCRIPTION = "<<no description>>";
+  private final String name;
+  private final String description;
+
+  /**
+   * Construct the metric with name only
+   * @param name  of the metric
+   */
+  public Metric(String name) {
+    this.name = name;
+    this.description = NO_DESCRIPTION;
+  }
+
+  /**
+   * Construct the metric with a name and a description
+   * @param name  of the metric
+   * @param desc  description of the metric
+   */
+  public Metric(String name, String desc) {
+    this.name = name;
+    this.description = desc;
+  }
+
+  /**
+   * Get the name of the metric
+   * @return  the name
+   */
+  public String name() {
+    return name;
+  }
+
+  /**
+   * Get the description of the metric
+   * @return  the description
+   */
+  public String description() {
+    return description;
+  }
+
+  /**
+   * Get the value of the metric
+   * @return  the value of the metric
+   */
+  public abstract Number value();
+
+  /**
+   * Accept a visitor interface
+   * @param visitor of the metric
+   */
+  public abstract void visit(MetricsVisitor visitor);
+
+  // Mostly for testing
+  @Override public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    final Metric other = (Metric) obj;
+    if (!this.name.equals(other.name())) {
+      return false;
+    }
+    if (!this.description.equals(other.description())) {
+      return false;
+    }
+    if (!value().equals(other.value())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override public int hashCode() {
+    return name.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "Metric{" + "name='" + name + "' description='" + description +
+           "' value="+ value() +'}';
+  }
+
+}

+ 38 - 0
src/core/org/apache/hadoop/metrics2/MetricCounter.java

@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * A generic immutable counter metric type
+ * @param <T> value type of the metric
+ */
+public abstract class MetricCounter<T extends Number> extends Metric {
+
+  /**
+   * Construct a counter metric
+   * @param name  of the metric
+   * @param description of the metric
+   */
+  public MetricCounter(String name, String description) {
+    super(name, description);
+  }
+
+  public abstract T value();
+
+}

+ 37 - 0
src/core/org/apache/hadoop/metrics2/MetricGauge.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * A generic immutable gauge metric
+ * @param <T> value type of the metric
+ */
+public abstract class MetricGauge<T extends Number> extends Metric {
+
+  /**
+   * Construct a gauge metric
+   * @param name  of the metric
+   * @param description of the metric
+   */
+  public MetricGauge(String name, String description) {
+    super(name, description);
+  }
+
+  public abstract T value();
+}
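
The concrete metric classes added later in this commit (MetricGaugeInt, MetricCounterLong, and friends) all follow the same small pattern. As an illustration only — the class and package names below are hypothetical and not part of the commit — a fixed-value integer gauge built on this API could look like:

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.MetricGauge;
import org.apache.hadoop.metrics2.MetricsVisitor;

// Hypothetical sketch, not part of the commit: an immutable int gauge.
class ConstantIntGauge extends MetricGauge<Integer> {

  private final int value;

  ConstantIntGauge(String name, String description, int value) {
    super(name, description);
    this.value = value;
  }

  @Override
  public Integer value() {
    return value;
  }

  @Override
  public void visit(MetricsVisitor visitor) {
    // Dispatch to the int-gauge callback of the visitor
    visitor.gauge(this, value);
  }
}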

+ 9 - 9
src/core/org/apache/hadoop/metrics/Updater.java → src/core/org/apache/hadoop/metrics2/MetricsBuilder.java

@@ -1,6 +1,4 @@
-/*
- * Updater.java
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,16 +16,18 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.metrics;
+package org.apache.hadoop.metrics2;
 
 /**
- * Call-back interface.  See <code>MetricsContext.registerUpdater()</code>.
+ * The metrics builder interface
  */
-public interface Updater {
-    
+public interface MetricsBuilder {
+
   /**
-   * Timer-based call-back from the metric library. 
+   * Add a metrics record
+   * @param name  of the record
+   * @return  a metrics record builder for the record
    */
-  public abstract void doUpdates(MetricsContext context);
+  public MetricsRecordBuilder addRecord(String name);
 
 }

+ 51 - 0
src/core/org/apache/hadoop/metrics2/MetricsException.java

@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * A general metrics exception wrapper
+ */
+public class MetricsException extends RuntimeException {
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Construct the exception with a message
+   * @param message for the exception
+   */
+  public MetricsException(String message) {
+    super(message);
+  }
+
+  /**
+   * Construct the exception with a message and a cause
+   * @param message for the exception
+   * @param cause of the exception
+   */
+  public MetricsException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  /**
+   * Construct the exception with a cause
+   * @param cause of the exception
+   */
+  public MetricsException(Throwable cause) {
+    super(cause);
+  }
+}

+ 60 - 0
src/core/org/apache/hadoop/metrics2/MetricsFilter.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+
+/**
+ * The metrics filter interface
+ */
+public abstract class MetricsFilter implements MetricsPlugin {
+
+  public abstract void init(SubsetConfiguration conf);
+
+  /**
+   * Whether to accept the name
+   * @param name  to filter on
+   * @return  true to accept; false otherwise.
+   */
+  public abstract boolean accepts(String name);
+
+  /**
+   * Whether to accept the tag
+   * @param tag to filter on
+   * @return  true to accept; false otherwise
+   */
+  public abstract boolean accepts(MetricsTag tag);
+
+  /**
+   * Whether to accept the tags
+   * @param tags to filter on
+   * @return  true to accept; false otherwise
+   */
+  public abstract boolean accepts(Iterable<MetricsTag> tags);
+
+  /**
+   * Whether to accept the record
+   * @param record  to filter on
+   * @return  true to accept; false otherwise.
+   */
+  public boolean accepts(MetricsRecord record) {
+    return accepts(record.tags());
+  }
+
+}
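
Beyond the pattern filters added later in this commit, any filtering policy can be plugged in by extending this class. A minimal sketch (hypothetical class name and configuration key, not part of the commit) that accepts only names starting with a configured prefix:

package org.apache.hadoop.metrics2.example;

import org.apache.commons.configuration.SubsetConfiguration;

import org.apache.hadoop.metrics2.MetricsFilter;
import org.apache.hadoop.metrics2.MetricsTag;

// Hypothetical sketch, not part of the commit; the "prefix" key is made up.
public class PrefixFilter extends MetricsFilter {

  private String prefix = "";

  @Override
  public void init(SubsetConfiguration conf) {
    prefix = conf.getString("prefix", "");
  }

  @Override
  public boolean accepts(String name) {
    return name.startsWith(prefix);
  }

  @Override
  public boolean accepts(MetricsTag tag) {
    return true; // no tag-based filtering in this sketch
  }

  @Override
  public boolean accepts(Iterable<MetricsTag> tags) {
    return true; // so accepts(MetricsRecord) admits every record
  }
}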

+ 34 - 0
src/core/org/apache/hadoop/metrics2/MetricsPlugin.java

@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+
+/**
+ * A fairly generic plugin interface
+ */
+public interface MetricsPlugin {
+
+  /**
+   * Initialize the plugin
+   * @param conf  the configuration object for the plugin
+   */
+  void init(SubsetConfiguration conf);
+
+}

+ 55 - 0
src/core/org/apache/hadoop/metrics2/MetricsRecord.java

@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * An immutable snapshot of metrics with a timestamp
+ */
+public interface MetricsRecord {
+  /**
+   * Get the timestamp of the metrics
+   * @return  the timestamp
+   */
+  long timestamp();
+
+  /**
+   * Get the record name of the metrics
+   * @return  the record name
+   */
+  String name();
+
+  /**
+   * Get the context name of the metrics
+   * @return  the context name
+   */
+  String context();
+
+  /**
+   * Get the tags of the record
+   * @return  the tags
+   */
+  Iterable<MetricsTag> tags();
+
+  /**
+   * Get the metrics of the record
+   * @return  the metrics
+   */
+  Iterable<Metric> metrics();
+
+}
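
Sinks and other consumers only ever see records through this read-only interface. A hypothetical helper (not part of the commit) that renders a record by walking its tags and metrics:

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.Metric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;

// Hypothetical helper, not part of the commit.
final class RecordFormatter {

  static String format(MetricsRecord record) {
    StringBuilder sb = new StringBuilder();
    sb.append(record.context()).append('.').append(record.name())
      .append('@').append(record.timestamp());
    for (MetricsTag tag : record.tags()) {     // grouping tags (host, port, ...)
      sb.append(' ').append(tag.name()).append('=').append(tag.value());
    }
    for (Metric metric : record.metrics()) {   // counter/gauge snapshots
      sb.append(' ').append(metric.name()).append('=').append(metric.value());
    }
    return sb.toString();
  }
}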

+ 121 - 0
src/core/org/apache/hadoop/metrics2/MetricsRecordBuilder.java

@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The metrics record builder interface
+ */
+public abstract class MetricsRecordBuilder {
+
+  /**
+   * Add a metrics tag
+   * @param name  of the tag
+   * @param description of the tag
+   * @param value of the tag
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder tag(String name, String description,
+                                           String value);
+
+  /**
+   * Add an immutable metrics tag object
+   * @param tag a pre-made tag object (potentially saving an object construction)
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder add(MetricsTag tag);
+
+  /**
+   * Set the context tag
+   * @param value of the context
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder setContext(String value);
+
+  /**
+   * Add an int counter metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addCounter(String name,
+                                                  String description,
+                                                  int value);
+
+  /**
+   * Add a long counter metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addCounter(String name,
+                                                  String description,
+                                                  long value);
+
+  /**
+   * Add an int gauge metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addGauge(String name,
+                                                String description, int value);
+
+  /**
+   * Add a long gauge metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addGauge(String name,
+                                                String description, long value);
+
+  /**
+   * Add a float gauge metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addGauge(String name,
+                                                String description,
+                                                float value);
+
+  /**
+   * Add a double gauge metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param value of the metric
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder addGauge(String name,
+                                                String description,
+                                                double value);
+
+  /**
+   * Add a pre-made immutable metric object
+   * @param metric  the pre-made metric to save an object construction
+   * @return  self
+   */
+  public abstract MetricsRecordBuilder add(Metric metric);
+
+}
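
All the tag and add* methods return the builder itself, so a record is typically assembled in one chained expression. A usage sketch (not part of the commit; the record, tag and metric names are made up for illustration):

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.MetricsBuilder;

// Hypothetical usage sketch, not part of the commit.
class RecordBuilderUsage {

  void snapshot(MetricsBuilder builder) {
    builder.addRecord("rpc")                    // one record named "rpc"
           .setContext("rpc")                   // sets the context tag
           .tag("port", "RPC port", "8020")
           .addCounter("calls", "Total calls", 42L)            // long counter
           .addGauge("queueLength", "Call queue length", 7);   // int gauge
  }
}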

+ 37 - 0
src/core/org/apache/hadoop/metrics2/MetricsSink.java

@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The metrics sink interface
+ */
+public interface MetricsSink extends MetricsPlugin {
+
+  /**
+   * Put a metrics record in the sink
+   * @param record  the record to put
+   */
+  void putMetrics(MetricsRecord record);
+
+  /**
+   * Flush any buffered metrics
+   */
+  void flush();
+
+}
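
A sink receives one already-filtered record per putMetrics call and may buffer output until flush. A hypothetical sink (not part of the commit) that simply writes each record to the commons-logging log:

package org.apache.hadoop.metrics2.example;

import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.hadoop.metrics2.Metric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsSink;
import org.apache.hadoop.metrics2.MetricsTag;

// Hypothetical sketch, not part of the commit: a sink that logs each record.
public class LogSink implements MetricsSink {

  private static final Log LOG = LogFactory.getLog(LogSink.class);

  public void init(SubsetConfiguration conf) {
    // no configuration needed for this sketch
  }

  public void putMetrics(MetricsRecord record) {
    StringBuilder sb = new StringBuilder(record.name());
    for (MetricsTag tag : record.tags()) {
      sb.append(' ').append(tag.name()).append('=').append(tag.value());
    }
    for (Metric metric : record.metrics()) {
      sb.append(' ').append(metric.name()).append('=').append(metric.value());
    }
    LOG.info(sb);
  }

  public void flush() {
    // nothing is buffered in this sketch
  }
}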

+ 33 - 0
src/core/org/apache/hadoop/metrics2/MetricsSource.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The metrics source interface
+ */
+public interface MetricsSource {
+
+  /**
+   * Get metrics from the source
+   * @param builder to contain the resulting metrics snapshot
+   * @param all     if true, return all metrics even if unchanged.
+   */
+  void getMetrics(MetricsBuilder builder, boolean all);
+
+}
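
A source is polled by the metrics system on every snapshot period and fills the supplied builder. A hypothetical source (not part of the commit; the record and metric names are made up):

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.MetricsBuilder;
import org.apache.hadoop.metrics2.MetricsSource;

// Hypothetical sketch, not part of the commit.
public class ExampleSource implements MetricsSource {

  private volatile int queueLength;

  public void setQueueLength(int length) {
    queueLength = length;
  }

  public void getMetrics(MetricsBuilder builder, boolean all) {
    // Called on every snapshot period; "all" requests unchanged metrics too,
    // which this simple source ignores since it always reports its value.
    builder.addRecord("example")
           .addGauge("queueLength", "Current queue length", queueLength);
  }
}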

+ 101 - 0
src/core/org/apache/hadoop/metrics2/MetricsSystem.java

@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The metrics system interface
+ */
+public interface MetricsSystem extends MetricsSystemMXBean {
+
+  /**
+   * Register a metrics source
+   * @param <T>   the type of the source
+   * @param source  to register
+   * @param name  of the source. Must be unique.
+   * @param desc  the description of the source.
+   * @return the source
+   * @exception MetricsException
+   */
+  <T extends MetricsSource> T register(String name, String desc, T source);
+
+  /**
+   * Register a metrics sink
+   * @param <T>   the type of the sink
+   * @param sink  to register
+   * @param name  of the sink. Must be unique.
+   * @param desc  the description of the sink
+   * @return the sink
+   * @exception MetricsException
+   */
+  <T extends MetricsSink> T register(String name, String desc, T sink);
+
+  /**
+   * Register a callback interface for JMX events
+   * @param callback  the callback object implementing the MBean interface.
+   */
+  void register(Callback callback);
+
+  /**
+   * Shutdown the metrics system completely (usually during server shutdown.)
+   * The MetricsSystemMXBean will be unregistered.
+   */
+  void shutdown();
+
+  /**
+   * The metrics system callback interface
+   */
+  @SuppressWarnings("PublicInnerClass")
+  static interface Callback {
+
+    /**
+     * Called before start()
+     */
+    void preStart();
+
+    /**
+     * Called after start()
+     */
+    void postStart();
+
+    /**
+     * Called before stop()
+     */
+    void preStop();
+
+    /**
+     * Called after stop()
+     */
+    void postStop();
+
+  }
+
+  /**
+   * Convenience abstract class for implementing the callback interface
+   */
+  @SuppressWarnings("PublicInnerClass")
+  public static abstract class AbstractCallback implements Callback {
+
+    public void preStart() {}
+    public void postStart() {}
+    public void preStop() {}
+    public void postStop() {}
+
+  }
+
+}
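
Putting the pieces together: sources, sinks and lifecycle callbacks are all registered against a MetricsSystem instance. How that instance is obtained is outside this excerpt, so the sketch below (not part of the commit) simply assumes an initialized instance and reuses the hypothetical ExampleSource and LogSink sketches above:

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.MetricsSystem;

// Hypothetical wiring sketch, not part of the commit. The ms parameter is
// assumed to be an already-initialized MetricsSystem instance.
class Wiring {

  static void wire(MetricsSystem ms) {
    // Register a source and a sink (reusing the sketches above)
    ms.register("exampleSource", "An example metrics source", new ExampleSource());
    ms.register("logSink", "A sink that logs every record", new LogSink());

    // Observe lifecycle events via the callback interface
    ms.register(new MetricsSystem.AbstractCallback() {
      @Override
      public void postStart() {
        System.out.println("metrics system started");
      }
    });
  }
}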

+ 52 - 0
src/core/org/apache/hadoop/metrics2/MetricsSystemMXBean.java

@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * The JMX interface to the metrics system
+ */
+public interface MetricsSystemMXBean {
+
+  /**
+   * Start the metrics system
+   * @exception MetricsException
+   */
+  public void start();
+
+  /**
+   * Stop the metrics system
+   * @exception MetricsException
+   */
+  public void stop();
+
+  /**
+   * Force a refresh of MBeans
+   * @exception MetricsException
+   */
+  public void refreshMBeans();
+
+  /**
+   * Note: named currentConfig (not getConfig) to avoid it being turned
+   * into a JMX attribute, which doesn't support multiple lines in the values
+   * @return the current config
+   * @exception MetricsException
+   */
+  public String currentConfig();
+
+}
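
Because the metrics system registers this MXBean, it can also be driven through standard JMX. The sketch below is purely illustrative and not part of the commit; in particular the ObjectName is an assumption, since the actual name is assigned by the implementation elsewhere in the commit:

package org.apache.hadoop.metrics2.example;

import java.lang.management.ManagementFactory;

import javax.management.JMX;
import javax.management.ObjectName;

import org.apache.hadoop.metrics2.MetricsSystemMXBean;

// Hypothetical sketch, not part of the commit; the ObjectName is an
// assumption made for illustration only.
class JmxControl {

  static void dumpAndRefresh() throws Exception {
    ObjectName name = new ObjectName("Hadoop:name=MetricsSystem");
    MetricsSystemMXBean proxy = JMX.newMXBeanProxy(
        ManagementFactory.getPlatformMBeanServer(), name,
        MetricsSystemMXBean.class);
    System.out.println(proxy.currentConfig()); // dump the effective config
    proxy.refreshMBeans();                     // force a refresh of MBeans
  }
}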

+ 99 - 0
src/core/org/apache/hadoop/metrics2/MetricsTag.java

@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * Immutable tag for metrics (for grouping on host/queue/username etc.)
+ */
+public class MetricsTag {
+
+  private final String name;
+  private final String description;
+  private final String value;
+
+  /**
+   * Construct the tag with name, description and value
+   * @param name  of the tag
+   * @param description of the tag
+   * @param value of the tag
+   */
+  public MetricsTag(String name, String description, String value) {
+    this.name = name;
+    this.description = description;
+    this.value = value;
+  }
+
+  /**
+   * Get the name of the tag
+   * @return  the name
+   */
+  public String name() {
+    return name;
+  }
+
+  /**
+   * Get the description of the tag
+   * @return  the description
+   */
+  public String description() {
+    return description;
+  }
+
+  /**
+   * Get the value of the tag
+   * @return  the value
+   */
+  public String value() {
+    return value;
+  }
+
+  // Mostly for testing
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    final MetricsTag other = (MetricsTag) obj;
+    if (!this.name.equals(other.name())) {
+      return false;
+    }
+    if (!this.description.equals(other.description())) {
+      return false;
+    }
+    if (!this.value.equals(other.value())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return name.hashCode() ^ value.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "MetricsTag{" + "name='" + name + "' description='" + description +
+           "' value='" + value + "'}";
+  }
+
+}

+ 68 - 0
src/core/org/apache/hadoop/metrics2/MetricsVisitor.java

@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2;
+
+/**
+ * A visitor interface for metrics
+ */
+public interface MetricsVisitor {
+
+  /**
+   * Callback for int value gauges
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void gauge(MetricGauge<Integer> metric, int value);
+
+  /**
+   * Callback for long value gauges
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void gauge(MetricGauge<Long> metric, long value);
+
+  /**
+   * Callback for float value gauges
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void gauge(MetricGauge<Float> metric, float value);
+
+  /**
+   * Callback for double value gauges
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void gauge(MetricGauge<Double> metric, double value);
+
+  /**
+   * Callback for integer value counters
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void counter(MetricCounter<Integer> metric, int value);
+
+  /**
+   * Callback for long value counters
+   * @param metric the metric object
+   * @param value of the metric
+   */
+  public void counter(MetricCounter<Long> metric, long value);
+
+}
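
Metric.visit dispatches to exactly one of these typed callbacks, so consumers can handle each value in its primitive form without instanceof checks. A hypothetical visitor (not part of the commit) that folds every value into a running total:

package org.apache.hadoop.metrics2.example;

import org.apache.hadoop.metrics2.Metric;
import org.apache.hadoop.metrics2.MetricCounter;
import org.apache.hadoop.metrics2.MetricGauge;
import org.apache.hadoop.metrics2.MetricsVisitor;

// Hypothetical sketch, not part of the commit: sums all metric values.
class SummingVisitor implements MetricsVisitor {

  private double total;

  public void gauge(MetricGauge<Integer> metric, int value)     { total += value; }
  public void gauge(MetricGauge<Long> metric, long value)       { total += value; }
  public void gauge(MetricGauge<Float> metric, float value)     { total += value; }
  public void gauge(MetricGauge<Double> metric, double value)   { total += value; }
  public void counter(MetricCounter<Integer> metric, int value) { total += value; }
  public void counter(MetricCounter<Long> metric, long value)   { total += value; }

  double total() { return total; }

  static double sum(Iterable<Metric> metrics) {
    SummingVisitor visitor = new SummingVisitor();
    for (Metric m : metrics) {
      m.visit(visitor); // each metric calls back exactly one method above
    }
    return visitor.total();
  }
}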

+ 165 - 0
src/core/org/apache/hadoop/metrics2/filter/AbstractPatternFilter.java

@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.filter;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * Base class for pattern based filters
+ */
+abstract class AbstractPatternFilter extends MetricsFilter {
+
+  protected static final String INCLUDE_KEY = "include";
+  protected static final String EXCLUDE_KEY = "exclude";
+  protected static final String INCLUDE_TAGS_KEY = "include.tags";
+  protected static final String EXCLUDE_TAGS_KEY = "exclude.tags";
+
+  private Pattern includePattern;
+  private Pattern excludePattern;
+  private final Map<String, Pattern> includeTagPatterns;
+  private final Map<String, Pattern> excludeTagPatterns;
+  private final Pattern tagPattern = Pattern.compile("^(\\w+):(.*)");
+
+  AbstractPatternFilter() {
+    includeTagPatterns = new HashMap<String, Pattern>();
+    excludeTagPatterns = new HashMap<String, Pattern>();
+  }
+
+  @Override
+  public void init(SubsetConfiguration conf) {
+    String patternString = conf.getString(INCLUDE_KEY);
+    if (patternString != null && !patternString.isEmpty()) {
+      setIncludePattern(compile(patternString));
+    }
+    patternString = conf.getString(EXCLUDE_KEY);
+    if (patternString != null && !patternString.isEmpty()) {
+      setExcludePattern(compile(patternString));
+    }
+    String[] patternStrings = conf.getStringArray(INCLUDE_TAGS_KEY);
+    if (patternStrings != null && patternStrings.length != 0) {
+      for (String pstr : patternStrings) {
+        Matcher matcher = tagPattern.matcher(pstr);
+        if (!matcher.matches()) {
+          throw new MetricsException("Illegal tag pattern: "+ pstr);
+        }
+        setIncludeTagPattern(matcher.group(1), compile(matcher.group(2)));
+      }
+    }
+    patternStrings = conf.getStringArray(EXCLUDE_TAGS_KEY);
+    if (patternStrings != null && patternStrings.length != 0) {
+      for (String pstr : patternStrings) {
+        Matcher matcher = tagPattern.matcher(pstr);
+        if (!matcher.matches()) {
+          throw new MetricsException("Illegal tag pattern: "+ pstr);
+        }
+        setExcludeTagPattern(matcher.group(1), compile(matcher.group(2)));
+      }
+    }
+  }
+
+  void setIncludePattern(Pattern includePattern) {
+    this.includePattern = includePattern;
+  }
+
+  void setExcludePattern(Pattern excludePattern) {
+    this.excludePattern = excludePattern;
+  }
+
+  void setIncludeTagPattern(String name, Pattern pattern) {
+    includeTagPatterns.put(name, pattern);
+  }
+
+  void setExcludeTagPattern(String name, Pattern pattern) {
+    excludeTagPatterns.put(name, pattern);
+  }
+
+  @Override
+  public boolean accepts(MetricsTag tag) {
+    // Accept if whitelisted
+    Pattern ipat = includeTagPatterns.get(tag.name());
+    if (ipat != null && ipat.matcher(tag.value()).matches()) {
+      return true;
+    }
+    // Reject if blacklisted
+    Pattern epat = excludeTagPatterns.get(tag.name());
+    if (epat != null && epat.matcher(tag.value()).matches()) {
+      return false;
+    }
+    // Reject if no match in whitelist only mode
+    if (ipat != null && epat == null) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public boolean accepts(Iterable<MetricsTag> tags) {
+    // Accept if any include tag pattern matches
+    for (MetricsTag t : tags) {
+      Pattern pat = includeTagPatterns.get(t.name());
+      if (pat != null && pat.matcher(t.value()).matches()) {
+        return true;
+      }
+    }
+    // Reject if any exclude tag pattern matches
+    for (MetricsTag t : tags) {
+      Pattern pat = excludeTagPatterns.get(t.name());
+      if (pat != null && pat.matcher(t.value()).matches()) {
+        return false;
+      }
+    }
+    // Reject if no match in whitelist only mode
+    if (!includeTagPatterns.isEmpty() && excludeTagPatterns.isEmpty()) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public boolean accepts(String name) {
+    // Accept if whitelisted
+    if (includePattern != null && includePattern.matcher(name).matches()) {
+      return true;
+    }
+    // Reject if blacklisted
+    if ((excludePattern != null && excludePattern.matcher(name).matches())) {
+      return false;
+    }
+    // Reject if no match in whitelist only mode
+    if (includePattern != null && excludePattern == null) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Compile a string pattern into a pattern object
+   * @param s the string pattern to compile
+   * @return the compiled pattern object
+   */
+  protected abstract Pattern compile(String s);
+}

+ 34 - 0
src/core/org/apache/hadoop/metrics2/filter/GlobFilter.java

@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.filter;
+
+import java.util.regex.Pattern;
+import org.apache.hadoop.fs.GlobPattern;
+
+/**
+ * A glob pattern filter for metrics
+ */
+public class GlobFilter extends AbstractPatternFilter {
+
+  @Override
+  protected Pattern compile(String s) {
+    return GlobPattern.compile(s);
+  }
+
+}
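
The pattern filters are configured through the include/exclude (and include.tags/exclude.tags) keys read in AbstractPatternFilter.init above; when both an include and an exclude pattern are present, names matching neither are accepted. A configuration sketch (not part of the commit; the property prefix and patterns are made up — in practice the keys come from hadoop-metrics2.properties):

package org.apache.hadoop.metrics2.example;

import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration;

import org.apache.hadoop.metrics2.filter.GlobFilter;

// Hypothetical sketch, not part of the commit.
class GlobFilterDemo {

  static void demo() {
    // Build an in-memory config; normally these keys would come from
    // hadoop-metrics2.properties under some prefix.
    PropertiesConfiguration props = new PropertiesConfiguration();
    props.setProperty("example.filter.include", "rpc*"); // whitelist glob
    props.setProperty("example.filter.exclude", "jvm*"); // blacklist glob

    GlobFilter filter = new GlobFilter();
    filter.init(new SubsetConfiguration(props, "example.filter", "."));

    System.out.println(filter.accepts("rpcActivity")); // true: matches include
    System.out.println(filter.accepts("jvmMetrics"));  // false: matches exclude
    System.out.println(filter.accepts("dfs"));         // true: matches neither,
                                                       // and an exclude list exists
  }
}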

+ 33 - 0
src/core/org/apache/hadoop/metrics2/filter/RegexFilter.java

@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.filter;
+
+import java.util.regex.Pattern;
+
+/**
+ * A regex pattern filter for metrics
+ */
+public class RegexFilter extends AbstractPatternFilter {
+
+  @Override
+  protected Pattern compile(String s) {
+    return Pattern.compile(s);
+  }
+
+}

+ 26 - 0
src/core/org/apache/hadoop/metrics2/impl/Consumer.java

@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+/**
+ * A simple generic consumer interface
+ */
+interface Consumer<T> {
+  void consume(T object) throws InterruptedException;
+}

+ 107 - 0
src/core/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java

@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanInfo;
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricCounter;
+import org.apache.hadoop.metrics2.MetricGauge;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+/**
+ * Helper class to build MBeanInfo from metrics records
+ */
+class MBeanInfoBuilder implements MetricsVisitor {
+
+  private final String name, description;
+  private List<MBeanAttributeInfo> attrs;
+  private Iterable<MetricsRecordImpl> recs;
+  private int curRecNo;
+
+  MBeanInfoBuilder(String name, String desc) {
+    this.name = name;
+    description = desc;
+    attrs = new ArrayList<MBeanAttributeInfo>();
+  }
+
+  MBeanInfoBuilder reset(Iterable<MetricsRecordImpl> recs) {
+    this.recs = recs;
+    attrs.clear();
+    return this;
+  }
+
+  MBeanAttributeInfo newAttrInfo(String name, String desc, String type) {
+    return new MBeanAttributeInfo(getAttrName(name), type, desc,
+                                  true, false, false); // read-only, non-is
+  }
+
+  MBeanAttributeInfo newAttrInfo(Metric m, String type) {
+    return newAttrInfo(m.name(), m.description(), type);
+  }
+
+  public void gauge(MetricGauge<Integer> metric, int value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Integer"));
+  }
+
+  public void gauge(MetricGauge<Long> metric, long value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Long"));
+  }
+
+  public void gauge(MetricGauge<Float> metric, float value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Float"));
+  }
+
+  public void gauge(MetricGauge<Double> metric, double value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Double"));
+  }
+
+  public void counter(MetricCounter<Integer> metric, int value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Integer"));
+  }
+
+  public void counter(MetricCounter<Long> metric, long value) {
+    attrs.add(newAttrInfo(metric, "java.lang.Long"));
+  }
+
+  String getAttrName(String name) {
+    return curRecNo > 0 ? name +"."+ curRecNo : name;
+  }
+
+  MBeanInfo get() {
+    curRecNo = 0;
+    for (MetricsRecordImpl rec : recs) {
+      for (MetricsTag t : rec.tags()) {
+        attrs.add(newAttrInfo("tag."+ t.name(), t.description(),
+                  "java.lang.String"));
+      }
+      for (Metric m : rec.metrics()) {
+        m.visit(this);
+      }
+      ++curRecNo;
+    }
+    MBeanAttributeInfo[] attrsArray = new MBeanAttributeInfo[attrs.size()];
+    return new MBeanInfo(name, description, attrs.toArray(attrsArray),
+                         null, null, null); // no ops/ctors/notifications
+  }
+}

+ 17 - 28
src/core/org/apache/hadoop/metrics/spi/MetricValue.java → src/core/org/apache/hadoop/metrics2/impl/MetricCounterInt.java

@@ -1,6 +1,4 @@
-/*
- * MetricValue.java
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,35 +16,26 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.metrics.spi;
+package org.apache.hadoop.metrics2.impl;
 
-/**
- * A Number that is either an absolute or an incremental amount.
- */
-public class MetricValue {
-    
-  public static final boolean ABSOLUTE = false;
-  public static final boolean INCREMENT = true;
-    
-  private boolean isIncrement;
-  private Number number;
-    
-  /** Creates a new instance of MetricValue */
-  public MetricValue(Number number, boolean isIncrement) {
-    this.number = number;
-    this.isIncrement = isIncrement;
-  }
+import org.apache.hadoop.metrics2.MetricCounter;
+import org.apache.hadoop.metrics2.MetricsVisitor;
 
-  public boolean isIncrement() {
-    return isIncrement;
+class MetricCounterInt extends MetricCounter<Integer> {
+
+  final int value;
+
+  MetricCounterInt(String name, String description, int value) {
+    super(name, description);
+    this.value = value;
   }
-    
-  public boolean isAbsolute() {
-    return !isIncrement;
+
+  public Integer value() {
+    return value;
   }
 
-  public Number getNumber() {
-    return number;
+  public void visit(MetricsVisitor visitor) {
+    visitor.counter(this, value);
   }
-    
+
 }

+ 41 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricCounterLong.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricCounter;
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+class MetricCounterLong extends MetricCounter<Long> {
+
+  final long value;
+
+  MetricCounterLong(String name, String description, long value) {
+    super(name, description);
+    this.value = value;
+  }
+
+  public Long value() {
+    return value;
+  }
+
+  public void visit(MetricsVisitor visitor) {
+    visitor.counter(this, value);
+  }
+
+}

+ 41 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricGaugeDouble.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricGauge;
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+class MetricGaugeDouble extends MetricGauge<Double> {
+
+  final double value;
+
+  MetricGaugeDouble(String name, String description, double value) {
+    super(name, description);
+    this.value = value;
+  }
+
+  public Double value() {
+    return value;
+  }
+
+  public void visit(MetricsVisitor visitor) {
+    visitor.gauge(this, value);
+  }
+
+}

+ 41 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricGaugeFloat.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricGauge;
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+class MetricGaugeFloat extends MetricGauge<Float> {
+
+  final float value;
+
+  MetricGaugeFloat(String name, String description, float value) {
+    super(name, description);
+    this.value = value;
+  }
+
+  public Float value() {
+    return value;
+  }
+
+  public void visit(MetricsVisitor visitor) {
+    visitor.gauge(this, value);
+  }
+
+}

+ 41 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricGaugeInt.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricGauge;
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+class MetricGaugeInt extends MetricGauge<Integer> {
+
+  final int value;
+
+  MetricGaugeInt(String name, String description, int value) {
+    super(name, description);
+    this.value = value;
+  }
+
+  public Integer value() {
+    return value;
+  }
+
+  public void visit(MetricsVisitor visitor) {
+    visitor.gauge(this, value);
+  }
+
+}

+ 41 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricGaugeLong.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricGauge;
+import org.apache.hadoop.metrics2.MetricsVisitor;
+
+class MetricGaugeLong extends MetricGauge<Long> {
+
+  final long value;
+
+  MetricGaugeLong(String name, String description, long value) {
+    super(name, description);
+    this.value = value;
+  }
+
+  public Long value() {
+    return value;
+  }
+
+  public void visit(MetricsVisitor visitor) {
+    visitor.gauge(this, value);
+  }
+
+}

+ 56 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsBuffer.java

@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.Iterator;
+
+/**
+ * An immutable element for the sink queues.
+ */
+class MetricsBuffer implements Iterable<MetricsBuffer.Entry> {
+
+  private final Iterable<Entry> mutable;
+
+  MetricsBuffer(Iterable<MetricsBuffer.Entry> mutable) {
+    this.mutable = mutable;
+  }
+
+  public Iterator<Entry> iterator() {
+    return mutable.iterator();
+  }
+
+  static class Entry {
+    private final String sourceName;
+    private final Iterable<MetricsRecordImpl> records;
+
+    Entry(String name, Iterable<MetricsRecordImpl> records) {
+      sourceName = name;
+      this.records = records;
+    }
+
+    String name() {
+      return sourceName;
+    }
+
+    Iterable<MetricsRecordImpl> records() {
+      return records;
+    }
+  }
+
+}

+ 12 - 22
src/core/org/apache/hadoop/metrics/util/MetricsBase.java → src/core/org/apache/hadoop/metrics2/impl/MetricsBufferBuilder.java

@@ -15,33 +15,23 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metrics.util;
 
-import org.apache.hadoop.metrics.MetricsRecord;
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ArrayList;
 
 /**
- * 
- * This is base class for all metrics
- *
+ * Builder for the immutable metrics buffers
  */
-public abstract class MetricsBase {
-  public static final String NO_DESCRIPTION = "NoDescription";
-  final private String name;
-  final private String description;
-  
-  protected MetricsBase(final String nam) {
-    name = nam;
-    description = NO_DESCRIPTION;
+class MetricsBufferBuilder extends ArrayList<MetricsBuffer.Entry> {
+  private static final long serialVersionUID = 1L;
+
+  boolean add(String name, Iterable<MetricsRecordImpl> records) {
+    return add(new MetricsBuffer.Entry(name, records));
   }
-  
-  protected MetricsBase(final String nam, final String desc) {
-    name = nam;
-    description = desc;
+
+  MetricsBuffer get() {
+    return new MetricsBuffer(this);
   }
-  
-  public abstract void pushMetric(final MetricsRecord mr);
-  
-  public String getName() { return name; }
-  public String getDescription() { return description; };
 
 }

+ 67 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsBuilderImpl.java

@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsFilter;
+
+class MetricsBuilderImpl extends ArrayList<MetricsRecordBuilderImpl>
+                         implements MetricsBuilder {
+  private static final long serialVersionUID = 1L;
+  private MetricsFilter recordFilter, metricFilter;
+
+  @Override
+  public MetricsRecordBuilderImpl addRecord(String name) {
+    boolean acceptable = recordFilter == null || recordFilter.accepts(name);
+    MetricsRecordBuilderImpl rb =
+        new MetricsRecordBuilderImpl(name, recordFilter, metricFilter,
+                                     acceptable);
+    if (acceptable) {
+      add(rb);
+    }
+    return rb;
+  }
+
+
+  public List<MetricsRecordImpl> getRecords() {
+    List<MetricsRecordImpl> records =
+        new ArrayList<MetricsRecordImpl>(size());
+    for (MetricsRecordBuilderImpl rb : this) {
+      MetricsRecordImpl mr = rb.getRecord();
+      if (mr != null) {
+        records.add(mr);
+      }
+    }
+    return records;
+  }
+
+  MetricsBuilderImpl setRecordFilter(MetricsFilter rf) {
+    recordFilter = rf;
+    return this;
+  }
+
+  MetricsBuilderImpl setMetricFilter(MetricsFilter mf) {
+    metricFilter = mf;
+    return this;
+  }
+
+}

+ 202 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsConfig.java

@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Locale;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.MetricsPlugin;
+
+import org.apache.hadoop.util.StringUtils;
+
+class MetricsConfig extends SubsetConfiguration {
+
+  static final Log LOG = LogFactory.getLog(MetricsConfig.class);
+
+  static final String DEFAULT_FILE_NAME = "hadoop-metrics2.properties";
+  static final String PREFIX_DEFAULT = "*.";
+
+  static final String PERIOD_KEY = "period";
+  static final int PERIOD_DEFAULT = 10; // seconds
+
+  static final String QUEUE_CAPACITY_KEY = "queue.capacity";
+  static final int QUEUE_CAPACITY_DEFAULT = 1;
+
+  static final String RETRY_DELAY_KEY = "retry.delay";
+  static final int RETRY_DELAY_DEFAULT = 10;  // seconds
+  static final String RETRY_BACKOFF_KEY = "retry.backoff";
+  static final int RETRY_BACKOFF_DEFAULT = 2; // back off factor
+  static final String RETRY_COUNT_KEY = "retry.count";
+  static final int RETRY_COUNT_DEFAULT = 3;
+
+  static final String JMX_CACHE_TTL_KEY = "jmx.cache.ttl";
+  static final int JMX_CACHE_TTL_DEFAULT = 10000; // millis
+
+  static final String CONTEXT_KEY = "context";
+  static final String NAME_KEY = "name";
+  static final String DESC_KEY = "description";
+  static final String SOURCE_KEY = "source";
+  static final String SINK_KEY = "sink";
+  static final String METRIC_FILTER_KEY = "metric.filter";
+  static final String RECORD_FILTER_KEY = "record.filter";
+  static final String SOURCE_FILTER_KEY = "source.filter";
+
+  static final Pattern INSTANCE_REGEX = Pattern.compile("([^.*]+)\\..+");
+
+  MetricsConfig(Configuration c, String prefix) {
+    super(c, prefix.toLowerCase(Locale.US), ".");
+  }
+
+  static MetricsConfig create(String prefix) {
+    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
+                     +".properties", DEFAULT_FILE_NAME);
+  }
+
+  static MetricsConfig create(String prefix, String... fileNames) {
+    return loadFirst(prefix, fileNames);
+  }
+
+  /**
+   * Load configuration from a list of files until the first successful load
+   * @param prefix  the prefix of the configuration to create
+   * @param fileNames the list of filenames to try
+   * @return  the loaded configuration object
+   */
+  static MetricsConfig loadFirst(String prefix, String... fileNames) {
+    for (String fname : fileNames) {
+      try {
+        PropertiesConfiguration cf = new PropertiesConfiguration(fname);
+        LOG.info("loaded properties from "+ fname);
+        return new MetricsConfig(cf, prefix);
+      }
+      catch (ConfigurationException e) {
+        if (e.getMessage().startsWith("Cannot locate configuration")) {
+          continue;
+        }
+        throw new MetricsConfigException(e);
+      }
+    }
+    throw new MetricsConfigException("Cannot locate configuration: tried "+
+        StringUtils.join(", ", fileNames));
+  }
+
+  @Override
+  public MetricsConfig subset(String prefix) {
+    return new MetricsConfig(this, prefix);
+  }
+
+  /**
+   * Return the sub-configs for the instances of a given type in the config,
+   * assuming keys of the form:<pre>
+   * [type].[instance].[option] = [value]</pre>
+   * Note that '*' is the special default instance, which is excluded from the result.
+   * @param type  of the instance
+   * @return  a map with [instance] as key and config object as value
+   */
+  Map<String, MetricsConfig> getInstanceConfigs(String type) {
+    HashMap<String, MetricsConfig> map = new HashMap<String, MetricsConfig>();
+    MetricsConfig sub = subset(type);
+
+    for (String key : sub.keys()) {
+      Matcher matcher = INSTANCE_REGEX.matcher(key);
+      if (matcher.matches()) {
+        String instance = matcher.group(1);
+        if (!map.containsKey(instance)) {
+          map.put(instance, sub.subset(instance));
+        }
+      }
+    }
+    return map;
+  }
+
+  Iterable<String> keys() {
+    return new Iterable<String>() {
+      @SuppressWarnings("unchecked")
+      public Iterator<String> iterator() {
+        return (Iterator<String>) getKeys();
+      }
+    };
+  }
+
+  /**
+   * Look up a property, falling back to the parent (default) configuration
+   * @param key the key to look up
+   * @return  the value, or null if not found
+   */
+  @Override
+  public Object getProperty(String key) {
+    Object value = super.getProperty(key);
+    if (value == null) {
+      LOG.debug("poking parent "+ getParent().getClass().getSimpleName() +
+                " for "+ key);
+      return getParent().getProperty(key.startsWith(PREFIX_DEFAULT) ? key
+                                     : PREFIX_DEFAULT + key);
+    }
+    return value;
+  }
+
+  <T extends MetricsPlugin> T getPlugin(String name) {
+    String classKey = name.isEmpty() ? "class" : name +".class";
+    String pluginClassName = getString(classKey);
+    if (pluginClassName == null || pluginClassName.isEmpty()) {
+      return null;
+    }
+    try {
+      Class<?> pluginClass = Class.forName(pluginClassName);
+      @SuppressWarnings("unchecked")
+      T plugin = (T) pluginClass.newInstance();
+      plugin.init(name.isEmpty() ? this : subset(name));
+      return plugin;
+    }
+    catch (Exception e) {
+      throw new MetricsConfigException("Error creating plugin: "+
+                                       pluginClassName, e);
+    }
+  }
+
+  @Override
+  public String toString() {
+    return toString(this);
+  }
+
+  String toString(Configuration c) {
+    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+    PrintStream ps = new PrintStream(buffer);
+    PropertiesConfiguration tmp = new PropertiesConfiguration();
+    tmp.copy(c);
+    try { tmp.save(ps); }
+    catch (Exception e) {
+      throw new MetricsConfigException(e);
+    }
+    return buffer.toString();
+  }
+
+}
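
MetricsConfig above loads hadoop-metrics2-<prefix>.properties first and falls back to
hadoop-metrics2.properties; keys are resolved per prefix, and "*." entries act as defaults
for every prefix via the getProperty() fallback to the parent configuration. A hedged sketch
of such a properties file follows; the sink class and the "test" prefix are made-up
placeholders, not names added by this patch.

    # defaults that apply to every prefix; "*" is the special default instance
    *.period=10

    # [prefix].[type].[instance].[option] = [value]
    # SketchSink is a hypothetical sink class, used only for illustration
    test.sink.file.class=org.example.metrics.SketchSink
    # optional: only push records whose context tag is "rpc" to this sink instance
    test.sink.file.context=rpc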

+ 41 - 42
src/core/org/apache/hadoop/metrics/MetricsException.java → src/core/org/apache/hadoop/metrics2/impl/MetricsConfigException.java

@@ -1,42 +1,41 @@
-/*
- * MetricsException.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-/**
- * General-purpose, unchecked metrics exception.
- */
-public class MetricsException extends RuntimeException {
-    
-  private static final long serialVersionUID = -1643257498540498497L;
-
-  /** Creates a new instance of MetricsException */
-  public MetricsException() {
-  }
-    
-  /** Creates a new instance of MetricsException 
-   *
-   * @param message an error message
-   */
-  public MetricsException(String message) {
-    super(message);
-  }
-    
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.MetricsException;
+
+/**
+ *  The metrics configuration runtime exception
+ */
+public class MetricsConfigException extends MetricsException {
+  private static final long serialVersionUID = 1L;
+
+  MetricsConfigException(String message) {
+    super(message);
+  }
+
+  MetricsConfigException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  MetricsConfigException(Throwable cause) {
+    super(cause);
+  }
+
+}

+ 145 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsRecordBuilderImpl.java

@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsTag;
+import static org.apache.hadoop.metrics2.lib.MetricsRegistry.*;
+
+class MetricsRecordBuilderImpl extends MetricsRecordBuilder {
+  private final long timestamp;
+  private final String name;
+  private final List<Metric> metrics;
+  private final List<MetricsTag> tags;
+  private final MetricsFilter recordFilter, metricFilter;
+  private final boolean acceptable;
+
+  MetricsRecordBuilderImpl(String name, MetricsFilter rf, MetricsFilter mf,
+                           boolean acceptable) {
+    timestamp = System.currentTimeMillis();
+    this.name = name;
+    metrics = new ArrayList<Metric>();
+    tags = new ArrayList<MetricsTag>();
+    recordFilter = rf;
+    metricFilter = mf;
+    this.acceptable = acceptable;
+  }
+
+  @Override
+  public MetricsRecordBuilder tag(String name, String description,
+                                  String value) {
+    if (acceptable) {
+      tags.add(new MetricsTag(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addCounter(String name, String description,
+                                         int value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricCounterInt(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addCounter(String name, String description,
+                                         long value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricCounterLong(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addGauge(String name, String description,
+                                       int value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricGaugeInt(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addGauge(String name, String description,
+                                       long value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricGaugeLong(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addGauge(String name, String description,
+                                       float value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricGaugeFloat(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder addGauge(String name, String description,
+                                       double value) {
+    if (acceptable && (metricFilter == null || metricFilter.accepts(name))) {
+      metrics.add(new MetricGaugeDouble(name, description, value));
+    }
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder add(MetricsTag tag) {
+    tags.add(tag);
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder add(Metric metric) {
+    metrics.add(metric);
+    return this;
+  }
+
+  @Override
+  public MetricsRecordBuilder setContext(String value) {
+    return tag(CONTEXT_KEY, CONTEXT_DESC, value);
+  }
+
+  public MetricsRecordImpl getRecord() {
+    if (acceptable && (recordFilter == null || recordFilter.accepts(tags))) {
+      return new MetricsRecordImpl(name, timestamp, tags(), metrics());
+    }
+    return null;
+  }
+
+  List<MetricsTag> tags() {
+    return Collections.unmodifiableList(tags);
+  }
+
+  List<Metric> metrics() {
+    return Collections.unmodifiableList(metrics);
+  }
+
+}
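
A metrics source fills one of these builders from its getMetrics() callback. Below is a
minimal sketch of the fluent usage, written against only the public metrics2 API shown in
this patch; the class, record, tag and metric names are illustrative.

    import org.apache.hadoop.metrics2.MetricsBuilder;
    import org.apache.hadoop.metrics2.MetricsSource;

    // Sketch of a source using the fluent record-builder API; all names are illustrative.
    public class SketchSource implements MetricsSource {
      private long requests = 42L; // dummy value for illustration

      public void getMetrics(MetricsBuilder builder, boolean all) {
        builder.addRecord("sketch")                        // dropped if a record filter rejects the name
            .setContext("test")                            // becomes the "context" tag
            .tag("hostName", "Local hostname", "host1")    // extra tag on the record
            .addCounter("requests", "Requests processed", requests) // long counter
            .addGauge("queue_len", "Current queue length", 7);      // int gauge
      }
    }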

+ 73 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsRecordFiltered.java

@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.Iterator;
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.util.TryIterator;
+
+class MetricsRecordFiltered implements MetricsRecord {
+
+  private final MetricsRecord delegate;
+  private final MetricsFilter filter;
+
+  MetricsRecordFiltered(MetricsRecord delegate, MetricsFilter filter) {
+    this.delegate = delegate;
+    this.filter = filter;
+  }
+
+  public long timestamp() {
+    return delegate.timestamp();
+  }
+
+  public String name() {
+    return delegate.name();
+  }
+
+  public String context() {
+    return delegate.context();
+  }
+
+  public Iterable<MetricsTag> tags() {
+    return delegate.tags();
+  }
+
+  public Iterable<Metric> metrics() {
+    return new Iterable<Metric>() {
+      final Iterator<Metric> it = delegate.metrics().iterator();
+      public Iterator<Metric> iterator() {
+        return new TryIterator<Metric>() {
+          public Metric tryNext() {
+            if (it.hasNext()) do {
+              Metric next = it.next();
+              if (filter.accepts(next.name())) {
+                return next;
+              }
+            } while (it.hasNext());
+            return done();
+          }
+        };
+      }
+    };
+  }
+
+}

+ 111 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsRecordImpl.java

@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.util.Contracts;
+
+public class MetricsRecordImpl implements MetricsRecord {
+
+  protected static final String CONTEXT_KEY = "context";
+  protected static final String DEFAULT_CONTEXT = "default";
+
+  private final long timestamp;
+  private final String name;
+  private final Iterable<MetricsTag> tags;
+  private final Iterable<Metric> metrics;
+
+  /**
+   * Construct a metrics record
+   * @param name  of the record
+   * @param timestamp of the record
+   * @param tags  of the record
+   * @param metrics of the record
+   */
+  public MetricsRecordImpl(String name, long timestamp,
+                           Iterable<MetricsTag> tags,
+                           Iterable<Metric> metrics) {
+    this.timestamp = Contracts.checkArg(timestamp, timestamp > 0, "timestamp");
+    this.name = Contracts.checkNotNull(name, "name");
+    this.tags = Contracts.checkNotNull(tags, "tags");
+    this.metrics = Contracts.checkNotNull(metrics, "metrics");
+  }
+
+  public long timestamp() {
+    return timestamp;
+  }
+
+  public String name() {
+    return name;
+  }
+
+  public String context() {
+    // usually the first tag
+    for (MetricsTag t : tags) {
+      if (t.name().equals(CONTEXT_KEY)) {
+        return String.valueOf(t.value());
+      }
+    }
+    return DEFAULT_CONTEXT;
+  }
+
+  public Iterable<MetricsTag> tags() {
+    return tags;
+  }
+
+  public Iterable<Metric> metrics() {
+    return metrics;
+  }
+
+  // Mostly for testing
+  @Override public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    final MetricsRecordImpl other = (MetricsRecordImpl) obj;
+    if (this.timestamp != other.timestamp()) {
+      return false;
+    }
+    if (!this.name.equals(other.name())) {
+      return false;
+    }
+    if (!this.tags.equals(other.tags())) {
+      return false;
+    }
+    if (!this.metrics.equals(other.metrics())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override public int hashCode() {
+    return name.hashCode();
+  }
+
+  @Override public String toString() {
+    return "MetricsRecordImpl{" + "timestamp=" + timestamp + " name='" + name +
+        "' tags=" + tags + " metrics=" + metrics + "}\n";
+  }
+
+}

+ 197 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java

@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import org.apache.hadoop.metrics2.lib.MetricMutableGaugeInt;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MetricMutableCounterInt;
+import org.apache.hadoop.metrics2.lib.MetricMutableStat;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.util.Contracts;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsSink;
+
+/**
+ * An adapter class for a metrics sink and its associated filters
+ */
+class MetricsSinkAdapter {
+
+  private final Log LOG = LogFactory.getLog(MetricsSinkAdapter.class);
+  private final String name, description, context;
+  private final MetricsSink sink;
+  private final MetricsFilter sourceFilter, recordFilter, metricFilter;
+  private final SinkQueue<MetricsBuffer> queue;
+  private final Thread sinkThread;
+  private volatile boolean stopping = false;
+  private volatile boolean inError = false;
+  private final int period, firstRetryDelay, retryCount;
+  private final float retryBackoff;
+  private final MetricsRegistry registry = new MetricsRegistry("sinkadapter");
+  private final MetricMutableStat latency;
+  private final MetricMutableCounterInt dropped;
+  private final MetricMutableGaugeInt qsize;
+
+  private final Consumer<MetricsBuffer> consumer =
+      new Consumer<MetricsBuffer>() {
+        public void consume(MetricsBuffer buffer) {
+          publishMetrics(buffer);
+        }
+      };
+
+  MetricsSinkAdapter(String name, String description, MetricsSink sink,
+                     String context, MetricsFilter sourceFilter,
+                     MetricsFilter recordFilter, MetricsFilter metricFilter,
+                     int period, int queueCapacity, int retryDelay,
+                     float retryBackoff, int retryCount) {
+    this.name = Contracts.checkNotNull(name, "name");
+    this.description = description;
+    this.sink = Contracts.checkNotNull(sink, "sink object");
+    this.context = context;
+    this.sourceFilter = sourceFilter;
+    this.recordFilter = recordFilter;
+    this.metricFilter = metricFilter;
+    this.period = Contracts.checkArg(period, period > 0, "period");
+    firstRetryDelay =
+        Contracts.checkArg(retryDelay, retryDelay > 0, "retry delay");
+    this.retryBackoff =
+        Contracts.checkArg(retryBackoff, retryBackoff > 1, "backoff factor");
+    this.retryCount = retryCount;
+    this.queue = new SinkQueue<MetricsBuffer>(
+        Contracts.checkArg(queueCapacity, queueCapacity > 0, "queue capacity"));
+    latency = registry.newStat(name +"_latency", "End to end latency",
+                               "ops", "time");
+    dropped = registry.newCounter(name +"_dropped", "Dropped updates", 0);
+    qsize = registry.newGauge(name + "_qsize", "Queue size", 0);
+
+    sinkThread = new Thread() {
+      @Override public void run() {
+        publishMetricsFromQueue();
+      }
+    };
+    sinkThread.setName(name);
+  }
+
+  boolean putMetrics(MetricsBuffer buffer, long logicalTime) {
+    if (logicalTime % period == 0) {
+      LOG.debug("enqueue, logicalTime="+ logicalTime);
+      if (queue.enqueue(buffer)) return true;
+      dropped.incr();
+      return false;
+    }
+    return true; // OK
+  }
+
+  void publishMetricsFromQueue() {
+    int retryDelay = firstRetryDelay;
+    int n = retryCount;
+    while (!stopping) {
+      try {
+        queue.consumeAll(consumer);
+        retryDelay = firstRetryDelay;
+        n = retryCount;
+        inError = false;
+      }
+      catch (InterruptedException e) {
+        LOG.info(name +" thread interrupted.");
+      }
+      catch (Exception e) {
+        if (n > 0) {
+          if (!inError) {
+            LOG.error("Got sink exception, retry in "+ retryDelay +"s", e);
+          }
+          retryDelay *= retryBackoff;
+          try { Thread.sleep(retryDelay * 1000); }
+          catch (InterruptedException e2) {
+            LOG.info(name +" thread interrupted while waiting for retry", e2);
+          }
+          --n;
+        }
+        else {
+          if (!inError) {
+            LOG.error("Got sink exception and over retry limit!", e);
+          }
+          queue.clear();
+          inError = true; // Don't keep complaining ad infinitum
+        }
+      }
+    }
+  }
+
+  void publishMetrics(MetricsBuffer buffer) {
+    long ts = 0;
+    for (MetricsBuffer.Entry entry : buffer) {
+      if (sourceFilter == null || sourceFilter.accepts(entry.name())) {
+        for (MetricsRecordImpl record : entry.records()) {
+          if ((context == null || context.equals(record.context())) &&
+              (recordFilter == null || recordFilter.accepts(record))) {
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("Pushing record "+ entry.name() +"."+ record.context() +
+                        "."+ record.name() +" to "+ name);
+            }
+            sink.putMetrics(metricFilter == null
+                ? record
+                : new MetricsRecordFiltered(record, metricFilter));
+            if (ts == 0) ts = record.timestamp();
+          }
+        }
+      }
+    }
+    if (ts > 0) {
+      sink.flush();
+      latency.add(System.currentTimeMillis() - ts);
+    }
+    LOG.debug("Done");
+  }
+
+  void start() {
+    sinkThread.start();
+    LOG.info("Sink "+ name +" started");
+  }
+
+  void stop() {
+    stopping = true;
+    sinkThread.interrupt();
+    try {
+      sinkThread.join();
+    }
+    catch (InterruptedException e) {
+      LOG.warn("Stop interrupted", e);
+    }
+  }
+
+  String name() {
+    return name;
+  }
+
+  String description() {
+    return description;
+  }
+
+  void sample(MetricsRecordBuilder rb, boolean all) {
+    registry.snapshot(rb, all);
+  }
+
+  MetricsSink sink() {
+    return sink;
+  }
+
+}
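
The adapter runs a user-supplied MetricsSink on its own thread and retries with exponential
back-off when the sink throws. A hedged sketch of a trivial sink such an adapter could drive
is below; it assumes MetricsSink inherits init(SubsetConfiguration) from MetricsPlugin, as
implied by MetricsConfig.getPlugin() above, and the class itself is illustrative.

    import org.apache.commons.configuration.SubsetConfiguration;
    import org.apache.hadoop.metrics2.Metric;
    import org.apache.hadoop.metrics2.MetricsRecord;
    import org.apache.hadoop.metrics2.MetricsSink;
    import org.apache.hadoop.metrics2.MetricsTag;

    // Sketch of a minimal sink that prints each record to stdout; illustrative only.
    public class StdoutSink implements MetricsSink {

      public void init(SubsetConfiguration conf) {
        // per-instance options (e.g. test.sink.stdout.*) would be read here
      }

      public void putMetrics(MetricsRecord record) {
        StringBuilder sb = new StringBuilder();
        sb.append(record.timestamp()).append(' ')
          .append(record.context()).append('.').append(record.name());
        for (MetricsTag tag : record.tags()) {
          sb.append(' ').append(tag.name()).append('=').append(tag.value());
        }
        for (Metric metric : record.metrics()) {
          sb.append(' ').append(metric.name()).append('=').append(metric.value());
        }
        System.out.println(sb);
      }

      public void flush() {
        System.out.flush();
      }
    }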

+ 254 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java

@@ -0,0 +1,254 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.HashMap;
+import javax.management.Attribute;
+import javax.management.AttributeList;
+import javax.management.AttributeNotFoundException;
+import javax.management.DynamicMBean;
+import javax.management.InvalidAttributeValueException;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.ObjectName;
+import javax.management.ReflectionException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsTag;
+import static org.apache.hadoop.metrics2.impl.MetricsConfig.*;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.metrics2.util.Contracts;
+
+/**
+ * An adapter class for a metrics source and its associated filters and JMX implementation
+ */
+class MetricsSourceAdapter implements DynamicMBean {
+
+  private static final Log LOG = LogFactory.getLog(MetricsSourceAdapter.class);
+
+  private final String prefix, name;
+  private final MetricsSource source;
+  private final MetricsFilter recordFilter, metricFilter;
+  private final HashMap<String, Attribute> attrCache;
+  private final MBeanInfoBuilder infoBuilder;
+  private final Iterable<MetricsTag> injectedTags;
+
+  private Iterable<MetricsRecordImpl> lastRecs;
+  private long jmxCacheTS;
+  private int jmxCacheTTL;
+  private MBeanInfo infoCache;
+  private ObjectName mbeanName;
+
+  MetricsSourceAdapter(String prefix, String name, String description,
+                       MetricsSource source, Iterable<MetricsTag> injectedTags,
+                       MetricsFilter recordFilter, MetricsFilter metricFilter,
+                       int jmxCacheTTL) {
+    this.prefix = Contracts.checkNotNull(prefix, "prefix");
+    this.name = Contracts.checkNotNull(name, "name");
+    this.source = Contracts.checkNotNull(source, "source");
+    attrCache = new HashMap<String, Attribute>();
+    infoBuilder = new MBeanInfoBuilder(name, description);
+    this.injectedTags = injectedTags;
+    this.recordFilter = recordFilter;
+    this.metricFilter = metricFilter;
+    this.jmxCacheTTL = Contracts.checkArg(jmxCacheTTL, jmxCacheTTL > 0,
+                                          "jmxCacheTTL");
+  }
+
+  MetricsSourceAdapter(String prefix, String name, String description,
+                       MetricsSource source, Iterable<MetricsTag> injectedTags,
+                       int period, MetricsConfig conf) {
+    this(prefix, name, description, source, injectedTags,
+        (MetricsFilter) conf.getPlugin(RECORD_FILTER_KEY),
+        (MetricsFilter) conf.getPlugin(METRIC_FILTER_KEY), period);
+  }
+
+  void start() {
+    if (mbeanName != null) {
+      LOG.warn("MBean Source "+ name +" already initialized!");
+    }
+    mbeanName = MBeans.register(prefix, name, this);
+    LOG.debug("MBean for source "+ name +" registered.", new Throwable());
+  }
+
+  @Override
+  public synchronized Object getAttribute(String attribute)
+      throws AttributeNotFoundException, MBeanException, ReflectionException {
+    updateJmxCache();
+    Attribute a = attrCache.get(attribute);
+    if (a == null) {
+      throw new AttributeNotFoundException(attribute +" not found");
+    }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug(attribute +": "+ a.getName() +"="+ a.getValue());
+    }
+    return a;
+  }
+
+  public void setAttribute(Attribute attribute)
+      throws AttributeNotFoundException, InvalidAttributeValueException,
+             MBeanException, ReflectionException {
+    throw new UnsupportedOperationException("Metrics are read-only.");
+  }
+
+  @Override
+  public synchronized AttributeList getAttributes(String[] attributes) {
+    updateJmxCache();
+    AttributeList ret = new AttributeList();
+    for (String key : attributes) {
+      Attribute attr = attrCache.get(key);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug(key +": "+ attr.getName() +"="+ attr.getValue());
+      }
+      ret.add(attr);
+    }
+    return ret;
+  }
+
+  @Override
+  public AttributeList setAttributes(AttributeList attributes) {
+    throw new UnsupportedOperationException("Metrics are read-only.");
+  }
+
+  @Override
+  public Object invoke(String actionName, Object[] params, String[] signature)
+      throws MBeanException, ReflectionException {
+    throw new UnsupportedOperationException("Not supported yet.");
+  }
+
+  @Override
+  public synchronized MBeanInfo getMBeanInfo() {
+    updateJmxCache();
+    return infoCache;
+  }
+
+  private void updateJmxCache() {
+    if (System.currentTimeMillis() - jmxCacheTS >= jmxCacheTTL) {
+      if (lastRecs == null) {
+        MetricsBuilderImpl builder = new MetricsBuilderImpl();
+        getMetrics(builder, true);
+      }
+      int cacheSize = attrCache.size(); // because updateAttrCache changes it!
+      int numMetrics = updateAttrCache();
+      if (cacheSize < numMetrics) {
+        updateInfoCache();
+      }
+      jmxCacheTS = System.currentTimeMillis();
+      lastRecs = null;
+    }
+  }
+
+  Iterable<MetricsRecordImpl> getMetrics(MetricsBuilderImpl builder,
+                                         boolean all) {
+    builder.setRecordFilter(recordFilter).setMetricFilter(metricFilter);
+    synchronized(this) {
+      if (lastRecs == null) {
+        all = true; // Get all the metrics to populate the sink caches
+      }
+    }
+    source.getMetrics(builder, all);
+    for (MetricsRecordBuilderImpl rb : builder) {
+      for (MetricsTag t : injectedTags) {
+        rb.add(t);
+      }
+    }
+    synchronized(this) {
+      lastRecs = builder.getRecords();
+      return lastRecs;
+    }
+  }
+
+  synchronized void stop() {
+    MBeans.unregister(mbeanName);
+    mbeanName = null;
+  }
+
+  synchronized void refreshMBean() {
+    MBeans.unregister(mbeanName);
+    mbeanName = MBeans.register(prefix, name, this);
+  }
+
+  private void updateInfoCache() {
+    LOG.debug("Updating info cache...");
+    infoCache = infoBuilder.reset(lastRecs).get();
+    LOG.debug("Done");
+  }
+
+  private int updateAttrCache() {
+    LOG.debug("Updating attr cache...");
+    int recNo = 0;
+    int numMetrics = 0;
+    for (MetricsRecordImpl record : lastRecs) {
+      for (MetricsTag t : record.tags()) {
+        setAttrCacheTag(t, recNo);
+        ++numMetrics;
+      }
+      for (Metric m : record.metrics()) {
+        setAttrCacheMetric(m, recNo);
+        ++numMetrics;
+      }
+      ++recNo;
+    }
+    LOG.debug("Done. numMetrics="+ numMetrics);
+    return numMetrics;
+  }
+
+  private static String tagName(String name, int recNo) {
+    StringBuilder sb = new StringBuilder(name.length() + 16);
+    sb.append("tag.").append(name);
+    if (recNo > 0) {
+      sb.append('.').append(recNo);
+    }
+    return sb.toString();
+  }
+
+  private void setAttrCacheTag(MetricsTag tag, int recNo) {
+    String key = tagName(tag.name(), recNo);
+    attrCache.put(key, new Attribute(key, tag.value()));
+  }
+
+  private static String metricName(String name, int recNo) {
+    if (recNo == 0) {
+      return name;
+    }
+    StringBuilder sb = new StringBuilder(name.length() + 12);
+    sb.append(name);
+    if (recNo > 0) {
+      sb.append('.').append(recNo);
+    }
+    return sb.toString();
+  }
+
+  private void setAttrCacheMetric(Metric metric, int recNo) {
+    String key = metricName(metric.name(), recNo);
+    attrCache.put(key, new Attribute(key, metric.value()));
+  }
+
+  String name() {
+    return name;
+  }
+
+  MetricsSource source() {
+    return source;
+  }
+
+}
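
Each registered source is exposed as a read-only DynamicMBean whose attributes mirror the
tags (prefixed "tag.") and metrics of its most recent snapshot, with a ".N" suffix for
records beyond the first. A hedged sketch of reading one of these attributes in-process is
below; the ObjectName is an assumption about what MBeans.register(prefix, name, ...)
produces, since that utility is not shown in this hunk.

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    // Sketch only: the ObjectName assumes a "Hadoop:service=<prefix>,name=<source>" convention.
    public class JmxReadSketch {
      public static void main(String[] args) throws Exception {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        ObjectName source =
            new ObjectName("Hadoop:service=test,name=MetricsSystem,sub=Stats"); // assumed name
        // Attribute names correspond to metric names from the last sampled records,
        // e.g. the "num_sources" gauge registered by MetricsSystemImpl below.
        Object value = mbs.getAttribute(source, "num_sources");
        System.out.println("num_sources = " + value);
      }
    }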

+ 514 - 0
src/core/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java

@@ -0,0 +1,514 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.io.StringWriter;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Timer;
+import java.util.TimerTask;
+import javax.management.ObjectName;
+
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.math.util.MathUtils;
+
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsFilter;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.MetricsTag;
+import org.apache.hadoop.metrics2.lib.MetricMutableCounterLong;
+import org.apache.hadoop.metrics2.lib.MetricMutableStat;
+import static org.apache.hadoop.metrics2.impl.MetricsConfig.*;
+import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.metrics2.util.Contracts;
+
+/**
+ * A base class for metrics system singletons
+ */
+public class MetricsSystemImpl implements MetricsSystem {
+
+  private static final Log LOG = LogFactory.getLog(MetricsSystemImpl.class);
+  static final String MS_CONTEXT = "metricssystem";
+  static final String NUM_SOURCES_KEY = "num_sources";
+  static final String NUM_SOURCES_DESC = "Number of metrics sources";
+  static final String NUM_SINKS_KEY = "num_sinks";
+  static final String NUM_SINKS_DESC = "Number of metrics sinks";
+  static final String MS_NAME = "MetricsSystem";
+  static final String MS_STATS_NAME = MS_NAME +",sub=Stats";
+  static final String MS_STATS_DESC = "Metrics system metrics";
+  static final String MS_CONTROL_NAME = MS_NAME +",sub=Control";
+
+  private final Map<String, MetricsSourceAdapter> sources;
+  private final Map<String, MetricsSinkAdapter> sinks;
+  private final List<Callback> callbacks;
+  private final MetricsBuilderImpl metricsBuilder;
+  private final MetricMutableStat sampleStat =
+      new MetricMutableStat("sample", "sampling stats", "ops", "time", true);
+  private final MetricMutableStat publishStat =
+      new MetricMutableStat("publish", "publishing stats", "ops", "time", true);
+  private final MetricMutableCounterLong dropStat =
+      new MetricMutableCounterLong("dropped_pub_all",
+        "number of dropped updates by all sinks", 0L);
+  private final List<MetricsTag> injectedTags;
+
+  // Things that are changed by init()/start()/stop()
+  private String prefix;
+  private MetricsFilter sourceFilter;
+  private MetricsConfig config;
+  private Map<String, MetricsConfig> sourceConfigs, sinkConfigs;
+  private boolean monitoring = false;
+  private Timer timer;
+  private int period; // seconds
+  private long logicalTime; // number of timer invocations * period
+  private ObjectName mbeanName;
+  private boolean publishSelfMetrics = true;
+  private MetricsSourceAdapter sysSource;
+
+  /**
+   * Construct the metrics system
+   * @param prefix  for the system
+   */
+  public MetricsSystemImpl(String prefix) {
+    this.prefix = prefix;
+    sources = new LinkedHashMap<String, MetricsSourceAdapter>();
+    sinks = new LinkedHashMap<String, MetricsSinkAdapter>();
+    sourceConfigs = new HashMap<String, MetricsConfig>();
+    sinkConfigs = new HashMap<String, MetricsConfig>();
+    callbacks = new ArrayList<Callback>();
+    injectedTags = new ArrayList<MetricsTag>();
+    metricsBuilder = new MetricsBuilderImpl();
+    if (prefix != null) {
+      // prefix could be null for default ctor, which requires init later
+      initSystemMBean();
+    }
+  }
+
+  /**
+   * Construct the system without initializing it (i.e., without reading the config etc.).
+   */
+  public MetricsSystemImpl() {
+    this(null);
+  }
+
+  /**
+   * Initialize the metrics system with a prefix.
+   * @param prefix  the system will look for configs with the prefix
+   */
+  public synchronized void init(String prefix) {
+    if (monitoring) {
+      LOG.warn(this.prefix +" metrics system already initialized!");
+      return;
+    }
+    Contracts.checkState(this.prefix == null, "prefix should be null so far.");
+    this.prefix = Contracts.checkNotNull(prefix, "prefix");
+    try { start(); }
+    catch (MetricsConfigException e) {
+      // Usually because hadoop-metrics2.properties is missing
+      // We can always start the metrics system later via JMX.
+      LOG.warn("Metrics system not started!", e);
+    }
+    initSystemMBean();
+  }
+
+  @Override
+  public synchronized void start() {
+    Contracts.checkNotNull(prefix, "prefix");
+    if (monitoring) {
+      LOG.warn(prefix +" metrics system already started!",
+               new MetricsException("Illegal start"));
+      return;
+    }
+    for (Callback cb : callbacks) cb.preStart();
+    configure(prefix);
+    startTimer();
+    monitoring = true;
+    LOG.info(prefix +" metrics system started");
+    for (Callback cb : callbacks) cb.postStart();
+  }
+
+  @Override
+  public synchronized void stop() {
+    if (!monitoring) {
+      LOG.warn(prefix +" metrics system not yet started!",
+               new MetricsException("Illegal stop"));
+      return;
+    }
+    for (Callback cb : callbacks) cb.preStop();
+    LOG.info("Stopping "+ prefix +" metrics system...");
+    stopTimer();
+    stopSources();
+    stopSinks();
+    clearConfigs();
+    monitoring = false;
+    LOG.info(prefix +" metrics system stopped.");
+    for (Callback cb : callbacks) cb.postStop();
+  }
+
+  @Override
+  public synchronized <T extends MetricsSource> T register(final String name,
+      final String desc, final T source) {
+    if (monitoring) {
+      registerSource(name, desc, source);
+    }
+    // We want to re-register the source to pick up new config when the
+    // metrics system restarts.
+    register(new AbstractCallback() {
+
+      @Override public void postStart() {
+        registerSource(name, desc, source);
+      }
+
+    });
+    LOG.debug("Registered source "+ name);
+    return source;
+  }
+
+  synchronized void registerSource(String name, String desc,
+                                   MetricsSource source) {
+    Contracts.checkNotNull(config, "config");
+    MetricsSourceAdapter sa = sources.get(name);
+    if (sa != null) {
+      LOG.warn("Source name "+name+" already exists!");
+      return;
+    }
+    MetricsConfig conf = sourceConfigs.get(name);
+    sa = conf != null
+        ? new MetricsSourceAdapter(prefix, name, desc, source,
+                                   injectedTags, period, conf)
+        : new MetricsSourceAdapter(prefix, name, desc, source,
+          injectedTags, period, config.subset(SOURCE_KEY));
+    sources.put(name, sa);
+    sa.start();
+  }
+
+  @Override
+  public synchronized <T extends MetricsSink> T register(final String name,
+      final String description, final T sink) {
+    if (config != null) {
+      registerSink(name, description, sink);
+    }
+    // We want to re-register the sink to pick up new config
+    // when the metrics system restarts.
+    register(new AbstractCallback() {
+
+      @Override public void postStart() {
+        registerSink(name, description, sink);
+      }
+
+    });
+    LOG.debug("Registered sink "+ name);
+    return sink;
+  }
+
+  synchronized void registerSink(String name, String desc, MetricsSink sink) {
+    Contracts.checkNotNull(config, "config");
+    MetricsSinkAdapter sa = sinks.get(name);
+    if (sa != null) {
+      LOG.warn("Sink name "+name+" already exists!");
+      return;
+    }
+    MetricsConfig conf = sinkConfigs.get(name);
+    sa = conf != null
+        ? newSink(name, desc, sink, conf)
+        : newSink(name, desc, sink, config.subset(SINK_KEY));
+    sinks.put(name, sa);
+    sa.start();
+  }
+
+  @Override
+  public synchronized void register(final Callback callback) {
+    callbacks.add((Callback) Proxy.newProxyInstance(
+        callback.getClass().getClassLoader(), new Class<?>[] { Callback.class },
+        new InvocationHandler() {
+          public Object invoke(Object proxy, Method method, Object[] args)
+              throws Throwable {
+            try {
+              return method.invoke(callback, args);
+            }
+            catch (Exception e) {
+              LOG.warn("Caught exception in callback "+ method.getName(), e);
+            }
+            return null;
+          }
+        }));
+  }
+
+  @Override
+  public synchronized void refreshMBeans() {
+    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
+      entry.getValue().refreshMBean();
+    }
+  }
+
+  @Override
+  public synchronized String currentConfig() {
+    PropertiesConfiguration saver = new PropertiesConfiguration();
+    StringWriter writer = new StringWriter();
+    saver.copy(config);
+    try { saver.save(writer); }
+    catch (Exception e) {
+      throw new MetricsConfigException("Error stringify config", e);
+    }
+    return writer.toString();
+  }
+
+  private synchronized void startTimer() {
+    if (timer != null) {
+      LOG.warn(prefix +" metrics system timer already started!");
+      return;
+    }
+    logicalTime = 0;
+    long millis = period * 1000;
+    timer = new Timer("Timer for '"+ prefix +"' metrics system", true);
+    timer.scheduleAtFixedRate(new TimerTask() {
+          public void run() {
+            try {
+              onTimerEvent();
+            }
+            catch (Exception e) {
+              LOG.warn(e);
+            }
+          }
+        }, millis, millis);
+    LOG.info("Scheduled sampling period at "+ period +" second(s).");
+  }
+
+  synchronized void onTimerEvent() {
+    logicalTime += period;
+    if (sinks.size() > 0) {
+      publishMetrics(sampleMetrics());
+    }
+  }
+
+  /**
+   * Sample all the sources for a snapshot of metrics/tags
+   * @return  the metrics buffer containing the snapshot
+   */
+  synchronized MetricsBuffer sampleMetrics() {
+    metricsBuilder.clear();
+    MetricsBufferBuilder bufferBuilder = new MetricsBufferBuilder();
+
+    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
+      if (sourceFilter == null || sourceFilter.accepts(entry.getKey())) {
+        sampleMetrics(entry.getValue(), bufferBuilder);
+      }
+    }
+    if (publishSelfMetrics) {
+      sampleMetrics(sysSource, bufferBuilder);
+    }
+    MetricsBuffer buffer = bufferBuilder.get();
+    return buffer;
+  }
+
+  private void sampleMetrics(MetricsSourceAdapter sa,
+                             MetricsBufferBuilder bufferBuilder) {
+    long startTime = System.currentTimeMillis();
+    bufferBuilder.add(sa.name(), sa.getMetrics(metricsBuilder, false));
+    metricsBuilder.clear();
+    sampleStat.add(System.currentTimeMillis() - startTime);
+    LOG.debug("Sampled source "+ sa.name());
+  }
+
+  /**
+   * Publish a metrics snapshot to all the sinks
+   * @param buffer  the metrics snapshot to publish
+   */
+  synchronized void publishMetrics(MetricsBuffer buffer) {
+    int dropped = 0;
+    for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
+      long startTime = System.currentTimeMillis();
+      dropped += entry.getValue().putMetrics(buffer, logicalTime) ? 0 : 1;
+      publishStat.add(System.currentTimeMillis() - startTime);
+    }
+    dropStat.incr(dropped);
+  }
+
+  private synchronized void stopTimer() {
+    if (timer == null) {
+      LOG.warn(prefix +" metrics system timer already stopped!");
+      return;
+    }
+    timer.cancel();
+    timer = null;
+  }
+
+  private synchronized void stopSources() {
+    for (Entry<String, MetricsSourceAdapter> entry : sources.entrySet()) {
+      MetricsSourceAdapter sa = entry.getValue();
+      LOG.info("Stopping metrics source "+ entry.getKey() +"("+
+                sa.source().getClass().getName() +")");
+      sa.stop();
+    }
+    sources.clear();
+  }
+
+  private synchronized void stopSinks() {
+    for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
+      MetricsSinkAdapter sa = entry.getValue();
+      LOG.info("Stopping metrics sink "+ entry.getKey() +"("+
+               sa.sink().getClass().getName() +")");
+      sa.stop();
+    }
+    sinks.clear();
+  }
+
+  private synchronized void configure(String prefix) {
+    config = MetricsConfig.create(prefix);
+    configureSinks();
+    configureSources();
+    configureSystem();
+  }
+
+  private synchronized void configureSystem() {
+    injectedTags.add(new MetricsTag("hostName", "Local hostname",
+                                    getHostname()));
+  }
+
+  private synchronized void configureSinks() {
+    Map<String, MetricsConfig> confs = config.getInstanceConfigs(SINK_KEY);
+    int confPeriod = 0;
+    for (Entry<String, MetricsConfig> entry : confs.entrySet()) {
+      MetricsConfig conf = entry.getValue();
+      int sinkPeriod = conf.getInt(PERIOD_KEY, PERIOD_DEFAULT);
+      confPeriod = confPeriod == 0 ? sinkPeriod
+                                   : MathUtils.gcd(confPeriod, sinkPeriod);
+      String sinkName = conf.getString(NAME_KEY);
+      if (sinkName != null && !sinkName.isEmpty()) {
+        // named config is for internally registered sinks
+        sinkConfigs.put(sinkName, conf);
+      }
+      else {
+        sinkName = "sink"+ entry.getKey();
+      }
+      try {
+        MetricsSinkAdapter sa = newSink(sinkName,
+            conf.getString(DESC_KEY, sinkName), conf);
+        sa.start();
+        sinks.put(sinkName, sa);
+      }
+      catch (Exception e) {
+        LOG.warn("Error creating "+ sinkName, e);
+      }
+    }
+    period = confPeriod > 0 ? confPeriod
+                            : config.getInt(PERIOD_KEY, PERIOD_DEFAULT);
+  }
+
+  static MetricsSinkAdapter newSink(String name, String desc, MetricsSink sink,
+                                    MetricsConfig conf) {
+    return new MetricsSinkAdapter(name, desc, sink, conf.getString(CONTEXT_KEY),
+        (MetricsFilter) conf.getPlugin(SOURCE_FILTER_KEY),
+        (MetricsFilter) conf.getPlugin(RECORD_FILTER_KEY),
+        (MetricsFilter) conf.getPlugin(METRIC_FILTER_KEY),
+        conf.getInt(PERIOD_KEY, PERIOD_DEFAULT),
+        conf.getInt(QUEUE_CAPACITY_KEY, QUEUE_CAPACITY_DEFAULT),
+        conf.getInt(RETRY_DELAY_KEY, RETRY_DELAY_DEFAULT),
+        conf.getFloat(RETRY_BACKOFF_KEY, RETRY_BACKOFF_DEFAULT),
+        conf.getInt(RETRY_COUNT_KEY, RETRY_COUNT_DEFAULT));
+  }
+
+  static MetricsSinkAdapter newSink(String name, String desc,
+                                    MetricsConfig conf) {
+    return newSink(name, desc, (MetricsSink) conf.getPlugin(""), conf);
+  }
+
+  private void configureSources() {
+    sourceFilter =
+        (MetricsFilter) config.getPlugin(PREFIX_DEFAULT + SOURCE_FILTER_KEY);
+    Map<String, MetricsConfig> confs = config.getInstanceConfigs(SOURCE_KEY);
+    for (Entry<String, MetricsConfig> entry : confs.entrySet()) {
+     sourceConfigs.put(entry.getKey(), entry.getValue());
+    }
+    registerSystemSource();
+  }
+
+  private void clearConfigs() {
+    sinkConfigs.clear();
+    sourceConfigs.clear();
+    injectedTags.clear();
+    config = null;
+  }
+
+  static String getHostname() {
+    try {
+      return InetAddress.getLocalHost().getHostName();
+    }
+    catch (Exception e) {
+      LOG.error("Error getting localhost name. Using 'localhost'...", e);
+    }
+    return "localhost";
+  }
+
+  private void registerSystemSource() {
+    sysSource = new MetricsSourceAdapter(prefix, MS_STATS_NAME, MS_STATS_DESC,
+        new MetricsSource() {
+      @Override
+      public void getMetrics(MetricsBuilder builder, boolean all) {
+        int numSources, numSinks;
+        synchronized(MetricsSystemImpl.this) {
+          numSources = sources.size();
+          numSinks = sinks.size();
+        }
+        MetricsRecordBuilder rb = builder.addRecord(MS_NAME)
+            .setContext(MS_CONTEXT)
+            .addGauge(NUM_SOURCES_KEY, NUM_SOURCES_DESC, numSources)
+            .addGauge(NUM_SINKS_KEY, NUM_SINKS_DESC, numSinks);
+        synchronized(MetricsSystemImpl.this) {
+          for (Entry<String, MetricsSinkAdapter> entry : sinks.entrySet()) {
+            entry.getValue().sample(rb, all);
+          }
+        }
+        sampleStat.snapshot(rb, all);
+        publishStat.snapshot(rb, all);
+        dropStat.snapshot(rb, all);
+      }
+    }, injectedTags, null, null, period);
+    sysSource.start();
+  }
+
+  private void initSystemMBean() {
+    mbeanName = MBeans.register(prefix, MS_CONTROL_NAME, this);
+  }
+
+  @Override
+  public synchronized void shutdown() {
+    if (monitoring) {
+      try { stop(); }
+      catch (Exception e) {
+        LOG.warn("Error stopping the metrics system", e);
+      }
+    }
+    MBeans.unregister(mbeanName);
+  }
+
+}
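
Wiring it together: a daemon creates a MetricsSystemImpl, initializes it with its
configuration prefix, and registers its sources (and, optionally, programmatic sinks);
sinks configured in the properties file are started automatically. A minimal sketch,
assuming the prefix "test" and the illustrative names below:

    import org.apache.hadoop.metrics2.MetricsBuilder;
    import org.apache.hadoop.metrics2.MetricsSource;
    import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;

    // Sketch of initializing the metrics system and registering a source; names are illustrative.
    public class MetricsWiringSketch {
      public static void main(String[] args) throws InterruptedException {
        MetricsSystemImpl ms = new MetricsSystemImpl();
        ms.init("test"); // loads hadoop-metrics2-test.properties, else hadoop-metrics2.properties

        ms.register("sketch", "A sketch source", new MetricsSource() {
          public void getMetrics(MetricsBuilder builder, boolean all) {
            builder.addRecord("sketch")
                   .addGauge("answer", "An illustrative gauge", 42);
          }
        });

        Thread.sleep(30000); // let the timer thread sample and publish a few periods
        ms.shutdown();
      }
    }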

+ 162 - 0
src/core/org/apache/hadoop/metrics2/impl/SinkQueue.java

@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.impl;
+
+import java.util.ConcurrentModificationException;
+
+/**
+ * A half-blocking (nonblocking for producers, blocking for consumers) queue
+ * for metrics sinks.
+ *
+ * New elements are dropped when the queue is full, to preserve the "interesting"
+ * elements at the onset of queue-filling events.
+ */
+class SinkQueue<T> {
+  // A fixed size circular buffer to minimize garbage
+  private final T[] data;
+  private int head; // head position
+  private int tail; // tail position
+  private int size; // number of elements
+  private Thread currentConsumer = null;
+
+  @SuppressWarnings("unchecked")
+  SinkQueue(int capacity) {
+    this.data = (T[]) new Object[Math.max(1, capacity)];
+    head = tail = size = 0;
+  }
+
+  synchronized boolean enqueue(T e) {
+    if (data.length == size) {
+      return false;
+    }
+    ++size;
+    tail = (tail + 1) % data.length;
+    data[tail] = e;
+    notify();
+    return true;
+  }
+
+  /**
+   * Consume one element; blocks if the queue is empty.
+   * Only one consumer at a time is allowed.
+   * @param consumer  the consumer callback object
+   */
+  void consume(Consumer<T> consumer) throws InterruptedException {
+    T e = waitForData();
+
+    try {
+      consumer.consume(e);  // can take forever
+      _dequeue();
+    }
+    finally {
+      clearConsumer();
+    }
+  }
+
+  /**
+   * Consume all the elements; blocks if the queue is empty.
+   * @param consumer  the consumer callback object
+   * @throws InterruptedException
+   */
+  void consumeAll(Consumer<T> consumer) throws InterruptedException {
+    waitForData();
+
+    try {
+      for (int i = size(); i-- > 0; ) {
+        consumer.consume(front()); // can take forever
+        _dequeue();
+      }
+    }
+    finally {
+      clearConsumer();
+    }
+  }
+
+  /**
+   * Dequeue one element from the head of the queue; blocks if the queue is empty.
+   * @return  the first element
+   * @throws InterruptedException
+   */
+  synchronized T dequeue() throws InterruptedException {
+    checkConsumer();
+
+    while (0 == size) {
+      wait();
+    }
+    return _dequeue();
+  }
+
+  private synchronized T waitForData() throws InterruptedException {
+    checkConsumer();
+
+    while (0 == size) {
+      wait();
+    }
+    currentConsumer = Thread.currentThread();
+    return front();
+  }
+
+  private synchronized void checkConsumer() {
+    if (currentConsumer != null) {
+      throw new ConcurrentModificationException("The "+
+          currentConsumer.getName() +" thread is consuming the queue.");
+    }
+  }
+
+  private synchronized void clearConsumer() {
+    currentConsumer = null;
+  }
+
+  private synchronized T _dequeue() {
+    if (0 == size) {
+      throw new IllegalStateException("Size must be > 0 here.");
+    }
+    --size;
+    head = (head + 1) % data.length;
+    T ret = data[head];
+    data[head] = null;  // hint to gc
+    return ret;
+  }
+
+  synchronized T front() {
+    return data[(head + 1) % data.length];
+  }
+
+  synchronized T back() {
+    return data[tail];
+  }
+
+  synchronized void clear() {
+    checkConsumer();
+
+    for (int i = data.length; i-- > 0; ) {
+      data[i] = null;
+    }
+    size = 0;
+  }
+
+  synchronized int size() {
+    return size;
+  }
+
+  int capacity() {
+    return data.length;
+  }
+
+}
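
A minimal usage sketch (illustrative only, not part of this patch); it assumes
the Consumer<T> callback used by consume()/consumeAll() above is an interface
with a single consume(T) method, defined elsewhere in this change:

    // Drain the queue on a dedicated consumer thread.
    final SinkQueue<String> queue = new SinkQueue<String>(128);
    Thread drainer = new Thread() {
      @Override public void run() {
        try {
          while (!isInterrupted()) {
            queue.consumeAll(new Consumer<String>() {
              public void consume(String record) {
                System.out.println(record); // deliver to the backend here
              }
            });
          }
        }
        catch (InterruptedException e) {
          // expected on shutdown; exit quietly
        }
      }
    };
    drainer.start();

    if (!queue.enqueue("record-1")) {
      // queue full: this record is dropped, earlier ones are preserved
    }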

+ 60 - 0
src/core/org/apache/hadoop/metrics2/lib/AbstractMetricsSource.java

@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+
+/**
+ * A convenient base class for writing metrics sources
+ */
+public abstract class AbstractMetricsSource implements MetricsSource {
+
+  protected final MetricsRegistry registry;
+
+  /**
+   * Construct the source with name and a mutable metrics factory
+   * @param name  of the default record
+   * @param mf  the factory to create mutable metrics
+   */
+  public AbstractMetricsSource(String name, MetricMutableFactory mf) {
+    registry = new MetricsRegistry(name, mf);
+  }
+
+  /**
+   * Construct the source with a name with a default factory
+   * @param name  of the default record
+   */
+  public AbstractMetricsSource(String name) {
+    this(name, new MetricMutableFactory());
+  }
+
+  /**
+   * @return  the registry for mutable metrics
+   */
+  public MetricsRegistry registry() {
+    return registry;
+  }
+
+  @Override
+  public void getMetrics(MetricsBuilder builder, boolean all) {
+    registry.snapshot(builder.addRecord(registry.name()), all);
+  }
+
+}
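
A hypothetical subclass (illustrative only; the class and metric names are
made up) showing how getMetrics() is inherited from this base class:

    class MyDaemonMetrics extends AbstractMetricsSource {
      private final MetricMutableCounterLong requests;

      MyDaemonMetrics() {
        super("myDaemon");
        requests = registry().newCounter("requests", "Requests served", 0L);
      }

      void incrRequests() {
        requests.incr(); // getMetrics() later snapshots this into the record
      }
    }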

+ 99 - 0
src/core/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java

@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
+
+/**
+ * The default metrics system singleton
+ */
+public enum DefaultMetricsSystem implements MetricsSystem {
+
+  /**
+   * The singleton instance
+   */
+  INSTANCE;
+
+  private static final int VERSION = 2;
+  private final MetricsSystemImpl impl = new MetricsSystemImpl();
+
+  private MetricsSystem init(String prefix) {
+    impl.init(prefix);
+    return impl;
+  }
+
+  /**
+   * Common static convenience method to initialize the metrics system
+   * @param prefix  for configuration
+   * @return the metrics system instance
+   */
+  public static MetricsSystem initialize(String prefix) {
+    return INSTANCE.init(prefix);
+  }
+
+  public <T extends MetricsSource> T
+  register(String name, String desc, T source) {
+    return impl.register(name, desc, source);
+  }
+
+  /**
+   * Common static method to register a source
+   * @param <T>   type of the source
+   * @param name  of the source
+   * @param desc  description
+   * @param source  the source object to register
+   * @return the source object
+   */
+  public static <T extends MetricsSource> T
+  registerSource(String name, String desc, T source) {
+    return INSTANCE.register(name, desc, source);
+  }
+
+  public <T extends MetricsSink> T register(String name, String desc, T sink) {
+    return impl.register(name, desc, sink);
+  }
+
+  public void register(Callback callback) {
+    impl.register(callback);
+  }
+
+  public void start() {
+    impl.start();
+  }
+
+  public void stop() {
+    impl.stop();
+  }
+
+  public void refreshMBeans() {
+    impl.refreshMBeans();
+  }
+
+  public String currentConfig() {
+    return impl.currentConfig();
+  }
+
+  public void shutdown() {
+    impl.shutdown();
+  }
+
+}
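
A short wiring sketch (illustrative only), reusing the hypothetical
MyDaemonMetrics source sketched earlier:

    // Initialize once per process with an arbitrary configuration prefix.
    DefaultMetricsSystem.initialize("myprefix");
    MyDaemonMetrics metrics = DefaultMetricsSystem.registerSource(
        "MyDaemon", "My daemon metrics", new MyDaemonMetrics());
    // ... on process shutdown:
    DefaultMetricsSystem.INSTANCE.shutdown();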

+ 75 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutable.java

@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+
+/**
+ * The mutable metric interface
+ */
+public abstract class MetricMutable {
+
+  /** name of the metric */
+  public final String name;
+  /** description of the metric */
+  public final String description;
+
+  private volatile boolean changed = true;
+
+  /**
+   * Construct a mutable metric
+   * @param name  of the metric
+   * @param description of the metric
+   */
+  public MetricMutable(String name, String description) {
+    this.name = name;
+    this.description = description;
+  }
+
+  /**
+   * Get a sample/snapshot of the metric
+   * @param builder the metrics record builder
+   * @param all if true, snapshot unchanged metrics as well
+   */
+  public abstract void snapshot(MetricsRecordBuilder builder, boolean all);
+
+  /**
+   * Get a sample/snapshot of the metric if changed
+   * @param builder the metrics record builder
+   */
+  public void snapshot(MetricsRecordBuilder builder) {
+    snapshot(builder, false);
+  }
+
+  /**
+   * Set the changed flag in mutable operations
+   */
+  protected void setChanged() { changed = true; }
+
+  /**
+   * Clear the changed flag in the snapshot operations
+   */
+  protected void clearChanged() { changed = false; }
+
+  /**
+   * @return  true if the metric has changed since the last snapshot
+   */
+  public boolean changed() { return changed; }
+
+}

+ 41 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounter.java

@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+/**
+ * The mutable counter (monotonically increasing) metric interface
+ * @param <T> the type of the metric
+ */
+public abstract class MetricMutableCounter<T extends Number>
+    extends MetricMutable {
+
+  /**
+   * Construct the metric with name and description
+   * @param name  of the metric
+   * @param description of the metric
+   */
+  public MetricMutableCounter(String name, String description) {
+    super(name, description);
+  }
+
+  /**
+   * Increment the metric value by 1.
+   */
+  public abstract void incr();
+}

+ 63 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounterInt.java

@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+
+/**
+ * A mutable int counter for implementing metrics sources
+ */
+public class MetricMutableCounterInt extends MetricMutableCounter<Integer> {
+
+  private volatile int value;
+
+  /**
+   * Construct a mutable int counter
+   * @param name  of the counter
+   * @param description of the counter
+   * @param initValue the initial value of the counter
+   */
+  public MetricMutableCounterInt(String name, String description,
+                                 int initValue) {
+    super(name, description);
+    this.value = initValue;
+  }
+
+  public synchronized void incr() {
+    ++value;
+    setChanged();
+  }
+
+  /**
+   * Increment the value by a delta
+   * @param delta of the increment
+   */
+  public synchronized void incr(int delta) {
+    value += delta;
+    setChanged();
+  }
+
+  public void snapshot(MetricsRecordBuilder builder, boolean all) {
+    if (all || changed()) {
+      builder.addCounter(name, description, value);
+      clearChanged();
+    }
+  }
+
+}

+ 63 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableCounterLong.java

@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+
+/**
+ * A mutable long counter
+ */
+public class MetricMutableCounterLong extends MetricMutableCounter<Long> {
+
+  private volatile long value;
+
+  /**
+   * Construct a mutable long counter
+   * @param name  of the counter
+   * @param description of the counter
+   * @param initValue the initial value of the counter
+   */
+  public MetricMutableCounterLong(String name, String description,
+                                  long initValue) {
+    super(name, description);
+    this.value = initValue;
+  }
+
+  public synchronized void incr() {
+    ++value;
+    setChanged();
+  }
+
+  /**
+   * Increment the value by a delta
+   * @param delta of the increment
+   */
+  public synchronized void incr(long delta) {
+    value += delta;
+    setChanged();
+  }
+
+  public void snapshot(MetricsRecordBuilder builder, boolean all) {
+    if (all || changed()) {
+      builder.addCounter(name, description, value);
+      clearChanged();
+    }
+  }
+
+}

+ 156 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableFactory.java

@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.Metric;
+
+/**
+ * Factory class for mutable metrics
+ */
+public class MetricMutableFactory {
+
+  static final String DEFAULT_SAMPLE_NAME = "ops";
+  static final String DEFAULT_VALUE_NAME  = "time";
+
+  /**
+   * Create a new mutable metric by name
+   * Usually overridden by app specific factory
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutable newMetric(String name) {
+    return null;
+  }
+
+  /**
+   * Create a mutable integer counter
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableCounterInt newCounter(String name, String description,
+                                            int initValue) {
+    return new MetricMutableCounterInt(name, description, initValue);
+  }
+
+  /**
+   * Create a mutable integer counter with name only.
+   * Usually gets overridden.
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableCounterInt newCounterInt(String name) {
+    return new MetricMutableCounterInt(name, Metric.NO_DESCRIPTION, 0);
+  }
+
+  /**
+   * Create a mutable long integer counter
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableCounterLong newCounter(String name, String description,
+                                             long initValue) {
+    return new MetricMutableCounterLong(name, description, initValue);
+  }
+
+  /**
+   * Create a mutable long integer counter with a name
+   * Usually gets overridden.
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableCounterLong newCounterLong(String name) {
+    return new MetricMutableCounterLong(name, Metric.NO_DESCRIPTION, 0L);
+  }
+
+  /**
+   * Create a mutable integer gauge
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableGaugeInt newGauge(String name, String description,
+                                        int initValue) {
+    return new MetricMutableGaugeInt(name, description, initValue);
+  }
+
+  /**
+   * Create a mutable integer gauge with name only.
+   * Usually gets overridden.
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableGaugeInt newGaugeInt(String name) {
+    return new MetricMutableGaugeInt(name, Metric.NO_DESCRIPTION, 0);
+  }
+
+  /**
+   * Create a mutable long integer gauge
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableGaugeLong newGauge(String name, String description,
+                                         long initValue) {
+    return new MetricMutableGaugeLong(name, description, initValue);
+  }
+
+  /**
+   * Create a mutable long integer gauge with name only.
+   * Usually gets overridden.
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableGaugeLong newGaugeLong(String name) {
+    return new MetricMutableGaugeLong(name, Metric.NO_DESCRIPTION, 0L);
+  }
+
+  /**
+   * Create a mutable stat metric
+   * @param name  of the metric
+   * @param description of the metric
+   * @param sampleName  of the metric (e.g., ops)
+   * @param valueName   of the metric (e.g., time or latency)
+   * @param extended    if true, produces extended stat (stdev, min/max etc.)
+   * @return  a new metric object
+   */
+  public MetricMutableStat newStat(String name, String description,
+                                   String sampleName, String valueName,
+                                   boolean extended) {
+    return new MetricMutableStat(name, description, sampleName, valueName,
+                                 extended);
+  }
+
+  /**
+   * Create a mutable stat metric with name only.
+   * Usually gets overridden.
+   * @param name  of the metric
+   * @return  a new metric object
+   */
+  public MetricMutableStat newStat(String name) {
+    return new MetricMutableStat(name, name, DEFAULT_SAMPLE_NAME,
+                                 DEFAULT_VALUE_NAME);
+  }
+
+}
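
An app-specific factory sketch (illustrative only; the naming convention is
hypothetical) overriding newMetric(String), which is what MetricsRegistry
uses to lazily create metrics on incr():

    class MyMetricFactory extends MetricMutableFactory {
      @Override
      public MetricMutable newMetric(String name) {
        if (name.endsWith("Time")) {
          return newStat(name);        // latency-style metrics become stats
        }
        return newCounterLong(name);   // everything else becomes a counter
      }
    }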

+ 47 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableGauge.java

@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+/**
+ * The mutable gauge metric interface
+ * @param <T> the type of the metric
+ */
+public abstract class MetricMutableGauge<T extends Number>
+    extends MetricMutable {
+
+  /**
+   * Construct the metric with name and description
+   * @param name  of the metric
+   * @param description of the metric
+   */
+  public MetricMutableGauge(String name, String description) {
+    super(name, description);
+  }
+
+  /**
+   * Increment the value of the metric by 1
+   */
+  public abstract void incr();
+
+  /**
+   * Decrement the value of the metric by 1
+   */
+  public abstract void decr();
+
+}

+ 86 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableGaugeInt.java

@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+
+/**
+ * A mutable int gauge
+ */
+public class MetricMutableGaugeInt extends MetricMutableGauge<Integer> {
+
+  private volatile int value;
+
+  /**
+   * Construct a mutable int gauge metric
+   * @param name  of the gauge
+   * @param description of the gauge
+   * @param initValue the initial value of the gauge
+   */
+  public MetricMutableGaugeInt(String name, String description,
+                               int initValue) {
+    super(name, description);
+    this.value = initValue;
+  }
+
+  public synchronized void incr() {
+    ++value;
+    setChanged();
+  }
+
+  /**
+   * Increment by delta
+   * @param delta of the increment
+   */
+  public synchronized void incr(int delta) {
+    value += delta;
+    setChanged();
+  }
+
+  public synchronized void decr() {
+    --value;
+    setChanged();
+  }
+
+  /**
+   * decrement by delta
+   * @param delta of the decrement
+   */
+  public synchronized void decr(int delta) {
+    value -= delta;
+    setChanged();
+  }
+
+  /**
+   * Set the value of the metric
+   * @param value to set
+   */
+  public void set(int value) {
+    this.value = value;
+    setChanged();
+  }
+
+  public void snapshot(MetricsRecordBuilder builder, boolean all) {
+    if (all || changed()) {
+      builder.addGauge(name, description, value);
+      clearChanged();
+    }
+  }
+
+}

+ 86 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableGaugeLong.java

@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+
+/**
+ * A mutable long gauge
+ */
+public class MetricMutableGaugeLong extends MetricMutableGauge<Long> {
+
+  private volatile long value;
+
+  /**
+   * Construct a mutable long gauge metric
+   * @param name  of the gauge
+   * @param description of the gauge
+   * @param initValue the initial value of the gauge
+   */
+  public MetricMutableGaugeLong(String name, String description,
+                                long initValue) {
+    super(name, description);
+    this.value = initValue;
+  }
+
+  public synchronized void incr() {
+    ++value;
+    setChanged();
+  }
+
+  /**
+   * Increment by delta
+   * @param delta of the increment
+   */
+  public synchronized void incr(long delta) {
+    value += delta;
+    setChanged();
+  }
+
+  public synchronized void decr() {
+    --value;
+    setChanged();
+  }
+
+  /**
+   * decrement by delta
+   * @param delta of the decrement
+   */
+  public synchronized void decr(long delta) {
+    value -= delta;
+    setChanged();
+  }
+
+  /**
+   * Set the value of the metric
+   * @param value to set
+   */
+  public void set(long value) {
+    this.value = value;
+    setChanged();
+  }
+
+  public void snapshot(MetricsRecordBuilder builder, boolean all) {
+    if (all || changed()) {
+      builder.addGauge(name, description, value);
+      clearChanged();
+    }
+  }
+
+}

+ 140 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricMutableStat.java

@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.util.SampleStat;
+
+/**
+ * A mutable metric with stats
+ *
+ * Useful for keeping throughput/latency stats.
+ * e.g., new MetricMutableStat("rpcName", "rpcName stats", "ops", "time");
+ */
+public class MetricMutableStat extends MetricMutable {
+
+  private final String numSamplesName, numSamplesDesc;
+  private final String avgValueName, avgValueDesc;
+  private final String stdevValueName, stdevValueDesc;
+  private final String iMinValueName, iMinValueDesc;
+  private final String iMaxValueName, iMaxValueDesc;
+  private final String minValueName, minValueDesc;
+  private final String maxValueName, maxValueDesc;
+
+  private final SampleStat intervalStat = new SampleStat();
+  private final SampleStat prevStat = new SampleStat();
+  private final SampleStat.MinMax minMax = new SampleStat.MinMax();
+  private long numSamples = 0;
+  private boolean extended = false;
+
+  /**
+   * Construct a sample statistics metric
+   * @param name        of the metric
+   * @param description of the metric
+   * @param sampleName  of the metric (e.g. "ops")
+   * @param valueName   of the metric (e.g. "time", "latency")
+   * @param extended    if true, create extended stats (stdev, min/max etc.)
+   */
+  public MetricMutableStat(String name, String description,
+                           String sampleName, String valueName,
+                           boolean extended) {
+    super(name, description);
+    String desc = StringUtils.uncapitalize(description);
+    numSamplesName = name +"_num_"+ sampleName;
+    numSamplesDesc = "Number of "+ sampleName +" for "+ desc;
+    avgValueName = name +"_avg_"+ valueName;
+    avgValueDesc = "Average "+ valueName +" for "+ desc;
+    stdevValueName = name +"_stdev_"+ valueName;
+    stdevValueDesc = "Standard deviation of "+ valueName +" for "+ desc;
+    iMinValueName = name +"_imin_"+ valueName;
+    iMinValueDesc = "Interval min "+ valueName +" for "+ desc;
+    iMaxValueName = name + "_imax_"+ valueName;
+    iMaxValueDesc = "Interval max "+ valueName +" for "+ desc;
+    minValueName = name +"_min_"+ valueName;
+    minValueDesc = "Min "+ valueName +" for "+ desc;
+    maxValueName = name +"_max_"+ valueName;
+    maxValueDesc = "Max "+ valueName +" for "+ desc;
+    this.extended = extended;
+  }
+
+  /**
+   * Construct a sample statistics metric with extended stats off by default
+   * @param name        of the metric
+   * @param description of the metric
+   * @param sampleName  of the metric (e.g. "ops")
+   * @param valueName   of the metric (e.g. "time", "latency")
+   */
+  public MetricMutableStat(String name, String description,
+                           String sampleName, String valueName) {
+    this(name, description, sampleName, valueName, false);
+  }
+
+  /**
+   * Add a number of samples and their sum to the running stat
+   * @param numSamples  number of samples
+   * @param sum of the samples
+   */
+  public synchronized void add(long numSamples, long sum) {
+    intervalStat.add(numSamples, sum);
+    setChanged();
+  }
+
+  /**
+   * Add a sample to the metric
+   * @param value the sample value
+   */
+  public synchronized void add(long value) {
+    intervalStat.add(value);
+    minMax.add(value);
+    setChanged();
+  }
+
+  public synchronized void snapshot(MetricsRecordBuilder builder, boolean all) {
+    if (all || changed()) {
+      numSamples += intervalStat.numSamples();
+      builder.addCounter(numSamplesName, numSamplesDesc, numSamples);
+      builder.addGauge(avgValueName, avgValueDesc, lastStat().mean());
+      if (extended) {
+        builder.addGauge(stdevValueName, stdevValueDesc, lastStat().stddev());
+        builder.addGauge(iMinValueName, iMinValueDesc, lastStat().min());
+        builder.addGauge(iMaxValueName, iMaxValueDesc, lastStat().max());
+        builder.addGauge(minValueName, minValueDesc, minMax.min());
+        builder.addGauge(maxValueName, maxValueDesc, minMax.max());
+      }
+      if (changed()) {
+        intervalStat.copyTo(prevStat);
+        intervalStat.reset();
+        clearChanged();
+      }
+    }
+  }
+
+  private SampleStat lastStat() {
+    return changed() ? intervalStat : prevStat;
+  }
+
+  /**
+   * Reset the all-time min/max of the metric
+   */
+  public void resetMinMax() {
+    minMax.reset();
+  }
+
+}
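
A usage sketch (illustrative only; "rpcTime" is a made-up metric name):

    MetricMutableStat rpcTime =
        new MetricMutableStat("rpcTime", "RPC processing time", "ops", "time");

    long start = System.nanoTime();
    // ... do the work being measured ...
    rpcTime.add((System.nanoTime() - start) / 1000000L); // elapsed millis

    // A collection cycle eventually calls rpcTime.snapshot(builder, all),
    // emitting the counter "rpcTime_num_ops" and the gauge "rpcTime_avg_time"
    // (plus stdev/interval/all-time min/max when extended stats are enabled).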

+ 364 - 0
src/core/org/apache/hadoop/metrics2/lib/MetricsRegistry.java

@@ -0,0 +1,364 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.lib;
+
+import java.util.LinkedHashMap;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * An optional metrics registry class for creating and maintaining a
+ * collection of mutable metrics (MetricMutable), making it easier to write
+ * metrics sources.
+ */
+public class MetricsRegistry {
+
+  /** key for the context tag */
+  public static final String CONTEXT_KEY = "context";
+  /** description for the context tag */
+  public static final String CONTEXT_DESC = "Metrics context";
+
+  private final LinkedHashMap<String, MetricMutable> metricsMap =
+      new LinkedHashMap<String, MetricMutable>();
+  private final LinkedHashMap<String, MetricsTag> tagsMap =
+      new LinkedHashMap<String, MetricsTag>();
+  private final String name;
+  private final MetricMutableFactory mf;
+
+  /**
+   * Construct the registry with a record name
+   * @param name  of the record of the metrics
+   */
+  public MetricsRegistry(String name) {
+    this.name = name;
+    this.mf = new MetricMutableFactory();
+  }
+
+  /**
+   * Construct the registry with a name and a metric factory
+   * @param name  of the record of the metrics
+   * @param factory for creating new mutable metrics
+   */
+  public MetricsRegistry(String name, MetricMutableFactory factory) {
+    this.name = name;
+    this.mf = factory;
+  }
+
+  /**
+   * @return  the name of the metrics registry
+   */
+  public String name() {
+    return name;
+  }
+
+  /**
+   * Get a metric by name
+   * @param name  of the metric
+   * @return  the metric object
+   */
+  public MetricMutable get(String name) {
+    return metricsMap.get(name);
+  }
+
+  /**
+   * Create a mutable integer counter
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new counter object
+   */
+  public MetricMutableCounterInt
+  newCounter(String name, String description, int initValue) {
+    checkMetricName(name);
+    MetricMutableCounterInt ret = mf.newCounter(name, description, initValue);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+
+  /**
+   * Create a mutable long integer counter
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new counter object
+   */
+  public MetricMutableCounterLong
+  newCounter(String name, String description, long initValue) {
+    checkMetricName(name);
+    MetricMutableCounterLong ret = mf.newCounter(name, description, initValue);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+
+  /**
+   * Create a mutable integer gauge
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new gauge object
+   */
+  public MetricMutableGaugeInt
+  newGauge(String name, String description, int initValue) {
+    checkMetricName(name);
+    MetricMutableGaugeInt ret = mf.newGauge(name, description, initValue);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+
+  /**
+   * Create a mutable long integer gauge
+   * @param name  of the metric
+   * @param description of the metric
+   * @param initValue of the metric
+   * @return  a new gauge object
+   */
+  public MetricMutableGaugeLong
+  newGauge(String name, String description, long initValue) {
+    checkMetricName(name);
+    MetricMutableGaugeLong ret = mf.newGauge(name, description, initValue);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+
+  /**
+   * Create a mutable metric with stats
+   * @param name  of the metric
+   * @param description of the metric
+   * @param sampleName  of the metric (e.g., "ops")
+   * @param valueName   of the metric (e.g., "time" or "latency")
+   * @param extended    produce extended stat (stdev, min/max etc.) if true.
+   * @return  a new metric object
+   */
+  public MetricMutableStat newStat(String name, String description,
+                                   String sampleName, String valueName,
+                                   boolean extended) {
+    checkMetricName(name);
+    MetricMutableStat ret =
+        mf.newStat(name, description, sampleName, valueName, extended);
+    metricsMap.put(name, ret);
+    return ret;
+  }
+
+  /**
+   * Create a mutable metric with stats
+   * @param name  of the metric
+   * @param description of the metric
+   * @param sampleName  of the metric (e.g., "ops")
+   * @param valueName   of the metric (e.g., "time" or "latency")
+   * @return  a new metric object
+   */
+  public MetricMutableStat newStat(String name, String description,
+                                   String sampleName, String valueName) {
+    return newStat(name, description, sampleName, valueName, false);
+  }
+
+  /**
+   * Create a mutable metric with stats using the name only
+   * @param name  of the metric
+   * @return a new metric object
+   */
+  public MetricMutableStat newStat(String name) {
+    return newStat(name, "", "ops", "time", false);
+  }
+
+  /**
+   * Increment a metric by name.
+   * @param name  of the metric
+   */
+  public void incr(String name) {
+    incr(name, mf);
+  }
+
+  /**
+   * Increment a metric by name.
+   * @param name  of the metric
+   * @param factory to lazily create the metric if not null
+   */
+  public void incr(String name, MetricMutableFactory factory) {
+    MetricMutable m = metricsMap.get(name);
+
+    if (m != null) {
+      if (m instanceof MetricMutableGauge<?>) {
+        ((MetricMutableGauge<?>) m).incr();
+      }
+      else if (m instanceof MetricMutableCounter<?>) {
+        ((MetricMutableCounter<?>) m).incr();
+      }
+      else {
+        throw new MetricsException("Unsupported incr() for metric "+ name);
+      }
+    }
+    else if (factory != null) {
+      metricsMap.put(name, factory.newMetric(name));
+      incr(name, null);
+    }
+    else {
+      throw new MetricsException("Metric "+ name +" doesn't exist");
+    }
+  }
+
+  /**
+   * Decrement a metric by name.
+   * @param name  of the metric
+   */
+  public void decr(String name) {
+    decr(name, mf);
+  }
+
+  /**
+   * Decrement a metric by name.
+   * @param name  of the metric
+   * @param factory to lazily create the metric if not null
+   */
+  public void decr(String name, MetricMutableFactory factory) {
+    MetricMutable m = metricsMap.get(name);
+
+    if (m != null) {
+      if (m instanceof MetricMutableGauge<?>) {
+        ((MetricMutableGauge<?>) m).decr();
+      }
+      else {
+        throw new MetricsException("Unsupported decr() for metric "+ name);
+      }
+    }
+    else if (factory != null) {
+      metricsMap.put(name, factory.newMetric(name));
+      decr(name, null);
+    }
+    else {
+      throw new MetricsException("Metric "+ name +" doesn't exist");
+    }
+  }
+
+  /**
+   * Add a value to a metric by name.
+   * @param name  of the metric
+   * @param value to add
+   */
+  public void add(String name, long value) {
+    add(name, value, mf);
+  }
+
+  /**
+   * Add a value to a metric by name.
+   * @param name  of the metric
+   * @param value to add
+   * @param factory to lazily create the metric if not null
+   */
+  public void add(String name, long value, MetricMutableFactory factory) {
+    MetricMutable m = metricsMap.get(name);
+
+    if (m != null) {
+      if (m instanceof MetricMutableStat) {
+        ((MetricMutableStat) m).add(value);
+      }
+      else {
+        throw new MetricsException("Unsupported add(value) for metric "+ name);
+      }
+    }
+    else if (factory != null) {
+      metricsMap.put(name, factory.newStat(name));
+      add(name, value, null);
+    }
+    else {
+      throw new MetricsException("Metric "+ name +" doesn't exist");
+    }
+  }
+
+  /**
+   * Set the metrics context tag
+   * @param name of the context
+   * @return  the registry itself, for chaining
+   */
+  public MetricsRegistry setContext(String name) {
+    return tag(CONTEXT_KEY, CONTEXT_DESC, name);
+  }
+
+  /**
+   * Add a tag to the metrics
+   * @param name  of the tag
+   * @param description of the tag
+   * @param value of the tag
+   * @return  the registry (for adding more tags)
+   */
+  public MetricsRegistry tag(String name, String description, String value) {
+    return tag(name, description, value, false);
+  }
+
+  /**
+   * Add a tag to the metrics
+   * @param name  of the tag
+   * @param description of the tag
+   * @param value of the tag
+   * @param override  existing tag if true
+   * @return  the registry (for adding more tags)
+   */
+  public MetricsRegistry tag(String name, String description, String value,
+                             boolean override) {
+    if (!override) checkTagName(name);
+    tagsMap.put(name, new MetricsTag(name, description, value));
+    return this;
+  }
+
+  /**
+   * Get the tags
+   * @return  the tags set
+   */
+  public Set<Entry<String, MetricsTag>> tags() {
+    return tagsMap.entrySet();
+  }
+
+  /**
+   * Get the metrics
+   * @return  the metrics set
+   */
+  public Set<Entry<String, MetricMutable>> metrics() {
+    return metricsMap.entrySet();
+  }
+
+  private void checkMetricName(String name) {
+    if (metricsMap.containsKey(name)) {
+      throw new MetricsException("Metric name "+ name +" already exists!");
+    }
+  }
+
+  private void checkTagName(String name) {
+    if (tagsMap.containsKey(name)) {
+      throw new MetricsException("Tag "+ name +" already exists!");
+    }
+  }
+
+  /**
+   * Sample all the mutable metrics and put the snapshot in the builder
+   * @param builder to contain the metrics snapshot
+   * @param all if true, snapshot all the metrics even if their values have not changed.
+   */
+  public void snapshot(MetricsRecordBuilder builder, boolean all) {
+    for (Entry<String, MetricsTag> entry : tags()) {
+      builder.add(entry.getValue());
+    }
+    for (Entry<String, MetricMutable> entry : metrics()) {
+      entry.getValue().snapshot(builder, all);
+    }
+  }
+
+}
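
A registry usage sketch (illustrative only; the record, tag and metric names
are made up):

    MetricsRegistry registry = new MetricsRegistry("myRecord");
    registry.setContext("myContext");
    MetricMutableCounterLong ops =
        registry.newCounter("ops", "Total operations", 0L);
    MetricMutableGaugeInt queueLen =
        registry.newGauge("queueLength", "Current queue length", 0);

    ops.incr();
    queueLen.set(42);
    // Inside MetricsSource.getMetrics(builder, all):
    //   registry.snapshot(builder.addRecord(registry.name()), all);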

+ 319 - 0
src/core/org/apache/hadoop/metrics2/package.html

@@ -0,0 +1,319 @@
+<html>
+
+  <!--
+     Licensed to the Apache Software Foundation (ASF) under one or more
+     contributor license agreements.  See the NOTICE file distributed with
+     this work for additional information regarding copyright ownership.
+     The ASF licenses this file to You under the Apache License, Version 2.0
+     (the "License"); you may not use this file except in compliance with
+     the License.  You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+  -->
+
+  <head>
+    <title>org.apache.hadoop.metrics2</title>
+  </head>
+  <body>
+    <h1>Metrics 2.0</h1>
+    <ul id="toc">
+      <li><a href="#overview">Overview</a></li>
+      <li><a href="#gettingstarted">Getting Started</a></li>
+      <li><a href="#filtering">Metrics Filtering</a></li>
+      <li><a href="#instrumentation">Metrics Instrumentation Strategy</a></li>
+      <li><a href="#migration">Migration from previous system</a></li>
+    </ul>
+    <h2><a name="overview">Overview</a></h2>
+    <p>This package provides a framework for metrics instrumentation
+      and publication.
+    </p>
+
+    <p>Instrumented components just need to implement the simple
+      <code>MetricsSource</code> interface with a single <code>getMetrics</code>
+      method; consumers of metrics just need to implement the simple
+      <code>MetricsSink</code> interface with a <code>putMetrics</code>
+      method along with the <code>init</code> and <code>flush</code> methods.
+      Producers register the metrics
+      sources with a metrics system, while consumers register the sinks. A
+      default metrics system is provided to marshal metrics from sources to
+      sinks based on (per source/sink) configuration options. Metrics
+      from <code>getMetrics</code> would also be published and queryable via
+      the standard JMX mechanism. This document targets the framework
+      users. Framework developers could consult the
+      <a href="http://wiki.apache.org/hadoop/HADOOP-6728-MetricsV2">design
+      document</a> for architecture and implementation notes.
+    </p>
+    <h3>Sub-packages</h3>
+    <dl>
+      <dt><code>org.apache.hadoop.metrics2.impl</code></dt>
+      <dd>Implementation classes of the framework for interfaces and/or
+        abstract classes defined in the top-level package. Sink plugin code
+        usually does not need to reference any class here.
+      </dd>
+      <dt> <code>org.apache.hadoop.metrics2.lib</code></dt>
+      <dd>Convenience classes for implementing metrics sources, including the
+        <code>MetricMutable[Gauge*|Counter*|Stat]</code> and
+        <code>MetricsRegistry</code>.
+      </dd>
+      <dt> <code>org.apache.hadoop.metrics2.filter</code></dt>
+      <dd>Builtin metrics filter implementations include the
+        <code>GlobFilter</code> and <code>RegexFilter</code>.
+      </dd>
+      <dt><code>org.apache.hadoop.metrics2.source</code></dt>
+      <dd>Builtin metrics source implementations include the
+        <code>JvmMetricsSource</code>.
+      </dd>
+      <dt> <code>org.apache.hadoop.metrics2.sink</code></dt>
+      <dd>Builtin metrics sink implementations include the
+        <code>FileSink</code>.
+      </dd>
+    </dl>
+
+    <h2><a name="gettingstarted">Getting started</a></h2>
+    <p>Here is a simple MetricsSource:</p>
+    <pre>
+    class MyMetrics implements MetricsSource {
+      public void getMetrics(MetricsBuilder builder, boolean all) {
+        builder.addRecord("myRecord").setContext("myContext")
+               .addGauge("myMetric", "My metrics description", 42);
+      }
+    }</pre>
+    <p>In this example there are three names:</p>
+    <dl>
+      <dt><em>myContext</em></dt>
+      <dd>The optional context name typically identifies either the
+        application, or a group of modules within an application or
+        library.
+      </dd>
+
+      <dt><em>myRecord</em></dt>
+      <dd>The record name generally identifies some entity for which a set of
+        metrics are to be reported.  For example, you could have a record named
+        "cacheStat" for reporting a number of statistics relating to the usage
+        of some cache in your application.</dd>
+
+      <dt><em>myMetric</em></dt>
+      <dd>This identifies a particular metric. In this case the metric is a
+        "gauge" type, which means it can vary in both directions, compared
+        with a "counter" type, which can only increase or stay the same. The 42
+        here is the value of the metric, which can be replaced by a variable
+        or a non-blocking method call returning a number.
+      </dd>
+    </dl>
+    <p>Note, the boolean argument <code>all</code>, if true, means that the
+      source should send all the metrics it defines, even if the metrics
+      are unchanged since the last <code>getMetrics</code> call. This enables an
+      optimization that avoids copying metrics that rarely change
+      (e.g., the total capacity of something, which only changes when new
+      resources such as nodes or disks are added).
+    </p>
+    <p>Here is a simple MetricsSink:</p>
+    <pre>
+    public class MySink implements MetricsSink {
+      public void putMetrics(MetricsRecord record) {
+        System.out.print(record);
+      }
+      public void init(SubsetConfiguration conf) {}
+      public void flush() {}
+    }</pre>
+    <p>In this example there are three additional concepts:</p>
+    <dl>
+      <dt><em>record</em></dt>
+      <dd>This object corresponds to the record created in metrics sources
+        e.g., the "myRecord" in previous example.
+      </dd>
+      <dt><em>conf</em></dt>
+      <dd>The configuration object for the sink instance with prefix removed.
+        So you can get any sink specific configuration using the usual
+        get* method.
+      </dd>
+      <dt><em>flush</em></dt>
+      <dd>This method is called for each update cycle, which may involve
+        more than one record. The sink should try to flush any buffered metrics
+        to its backend upon the call. But it's not required that the
+        implementation is synchronous.
+      </dd>
+    </dl>
+    <p>In order to make use of our <code>MyMetrics</code> and <code>MySink</code>,
+      they need to be hooked up to a metrics system. In this case (and most
+      cases), the <code>DefaultMetricsSystem</code> would suffice.
+    </p>
+    <pre>
+    DefaultMetricsSystem.initialize("test"); // called once per application
+    DefaultMetricsSystem.INSTANCE.register("MyMetrics", "my metrics description",
+                                           new MyMetrics());</pre>
+    <p>Sinks are usually specified in a configuration file, say,
+    "hadoop-metrics2-test.properties", as:
+    </p>
+    <pre>
+    test.sink.mysink0.class=com.example.hadoop.metrics.MySink</pre>
+    <p>The configuration syntax is:</p>
+    <pre>
+    [prefix].[source|sink|jmx].[instance].[option]</pre>
+    <p>In the previous example, <code>test</code> is the prefix and
+      <code>mysink0</code> is an instance name.
+      <code>DefaultMetricsSystem</code> would try to load
+      <code>hadoop-metrics2-[prefix].properties</code> first, and if not found,
+      try the default <code>hadoop-metrics2.properties</code> in the class path.
+      Note, the <code>[instance]</code> is an arbitrary name to uniquely
+      identify a particular sink instance. The asterisk (<code>*</code>) can be
+      used to specify default options.
+    </p>
+    <p>Consult the metrics instrumentation in jvm, rpc, hdfs and mapred, etc.
+      for more examples.
+    </p>
+
+    <h2><a name="filtering">Metrics Filtering</a></h2>
+    <p>One of the features of the default metrics system is metrics filtering
+      configuration by source, context, record/tags and metrics. The least
+      expensive way to filter out metrics would be at the source level, e.g.,
+      filtering out the source named "MyMetrics". The most expensive way would be
+      per metric filtering.
+    </p>
+    <p>Here are some examples:</p>
+    <pre>
+    test.sink.file0.class=org.apache.hadoop.metrics2.sink.FileSink
+    test.sink.file0.context=foo</pre>
+    <p>In this example, we configured one sink instance that would
+      accept metrics from context <code>foo</code> only.
+    </p>
+    <pre>
+    *.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
+    test.*.source.filter.include=foo
+    test.*.source.filter.exclude=bar</pre>
+    <p>In this example, we specify a source filter that includes source
+      <code>foo</code> and excludes <code>bar</code>. When only include
+      patterns are specified, the filter operates in the white listing mode,
+      where only matched sources are included. Likewise, when only exclude
+      patterns are specified, only matched sources are excluded. Sources that
+      match neither pattern are also included when both kinds of patterns
+      are present. Note, the include patterns take precedence over the exclude
+      patterns.
+    </p>
+    <p>Similarly, you can specify the <code>record.filter</code> and
+      <code>metrics.filter</code> options, which operate at record and metric
+      level, respectively. Filters can be combined to optimize
+      the filtering efficiency.</p>
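+    <p>For illustration only (the exact option names below are an assumption,
+      following the same <code>class</code>/<code>include</code>/<code>exclude</code>
+      pattern as the source filter shown above), a record-level filter might be
+      configured as:
+    </p>
+    <pre>
+    *.record.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
+    test.*.record.filter.include=cacheStat
+    test.*.record.filter.exclude=debug*</pre>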
+
+    <h2><a name="instrumentation">Metrics instrumentation strategy</a></h2>
+
+    The previous examples showed minimal usage of the
+    metrics framework. In a larger system (like Hadoop) that allows
+    custom metrics instrumentation, we recommend the following strategy:
+    <pre>
+    class MyMetrics extends MyInstrumentation implements MetricsSource {
+
+      final MetricsRegistry registry = new MetricsRegistry("myRecord");
+      final MetricMutableGaugeInt gauge0 =
+          registry.newGauge("myGauge", "my gauge description", 0);
+      final MetricMutableCounterLong counter0 =
+          registry.newCounter("myCounter", "my metric description", 0L);
+      final MetricMutableStat stat0 =
+          registry.newStat("myStat", "my stat description", "ops", "time");
+
+      @Override public void setGauge0(int value) { gauge0.set(value); }
+      @Override public void incrCounter0() { counter0.incr(); }
+      @Override public void addStat0(long elapsed) { stat0.add(elapsed); }
+
+      public void getMetrics(MetricsBuilder builder, boolean all) {
+        registry.snapshot(builder.addRecord(registry.name()), all);
+      }
+
+    }
+    </pre>
+
+    Note, in this example we introduced the following:
+    <dl>
+      <dt><em>MyInstrumentation</em></dt>
+      <dd>This is usually an abstract class (or interface) to define an
+        instrumentation interface (incrCounter0 etc.) that allows different
+        implementations. This is a mechanism to allow different metrics
+        systems to be used at runtime via configuration.
+      </dd>
+      <dt><em>MetricsRegistry</em></dt>
+      <dd>This is a library class for easy creation and manipulation of
+        mutable metrics.
+      </dd>
+      <dt><em>MetricMutable[Gauge*|Counter*|Stat]</em></dt>
+      <dd>These are library classes to manage mutable metrics for
+        implementations of metrics sources. They produce immutable gauge and
+        counters (Metric[Gauge*|Counter*]) for downstream consumption (sinks)
+        upon <code>snapshot</code>. The <code>MetricMutableStat</code>
+        in particular, provides a way to measure latency and throughput of an
+        operation. In this particular case, it produces a long counter
+        "myStat_num_ops" and double gauge "myStat_avg_time" when snapshotted.
+      </dd>
+    </dl>
+
+    <h2><a name="migration">Migration from previous system</a></h2>
+    <p>Users of the previous metrics system would notice the lack of
+      <code>context</code> prefix in the configuration examples. The new
+      metrics system decouples the concept of context (for grouping) from the
+      implementation, in which a particular context object did the updating
+      and publishing of metrics. That coupling caused problems when you wanted
+      a single context to be consumed by multiple backends, and you also had
+      to configure an implementation instance per context, even if a single
+      backend could handle multiple contexts (file, ganglia etc.):
+    </p>
+    <table width="99%" border="1" cellspacing="0" cellpadding="4">
+      <tbody>
+        <tr>
+          <th width="40%">Before</th><th>After</th>
+        </tr>
+        <tr>
+          <td><pre>
+    context1.class=org.apache.hadoop.metrics.file.FileContext
+    context2.class=org.apache.hadoop.metrics.file.FileContext
+    ...
+    contextn.class=org.apache.hadoop.metrics.file.FileContext</pre>
+          </td>
+          <td><pre>
+    myprefix.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink</pre>
+          </td>
+        </tr>
+      </tbody>
+    </table>
+    <p>In the new metrics system, you can simulate the previous behavior by
+      using the context option in the sink options like the following:
+    </p>
+    <table width="99%" border="1" cellspacing="0" cellpadding="4">
+      <tbody>
+        <tr>
+          <th width="40%">Before</th><th>After</th>
+        </tr>
+        <tr>
+          <td><pre>
+    context0.class=org.apache.hadoop.metrics.file.FileContext
+    context0.fileName=context0.out
+    context1.class=org.apache.hadoop.metrics.file.FileContext
+    context1.fileName=context1.out
+    ...
+    contextn.class=org.apache.hadoop.metrics.file.FileContext
+    contextn.fileName=contextn.out</pre>
+          </td>
+          <td><pre>
+    myprefix.sink.*.class=org.apache.hadoop.metrics2.sink.FileSink
+    myprefix.sink.file0.context=context0
+    myprefix.sink.file0.filename=context0.out
+    myprefix.sink.file1.context=context1
+    myprefix.sink.file1.filename=context1.out
+    ...
+    myprefix.sink.filen.context=contextn
+    myprefix.sink.filen.filename=contextn.out</pre>
+          </td>
+        </tr>
+      </tbody>
+    </table>
+    <p>to send metrics of a particular context to a particular backend. Note,
+      <code>myprefix</code> is an arbitrary prefix for configuration groupings,
+      typically the name of a particular process
+      (<code>namenode</code>, <code>jobtracker</code>, etc.)
+    </p>
+  </body>
+</html>

+ 83 - 0
src/core/org/apache/hadoop/metrics2/sink/FileSink.java

@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.PrintWriter;
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.hadoop.metrics2.Metric;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * A metrics sink that writes records to a file, or to standard output when
+ * no filename is configured.
+ */
+public class FileSink implements MetricsSink {
+
+  private static final String FILENAME_KEY = "filename";
+  private PrintWriter writer;
+
+  @Override
+  public void init(SubsetConfiguration conf) {
+    String filename = conf.getString(FILENAME_KEY);
+    try {
+      writer = filename == null
+          ? new PrintWriter(new BufferedOutputStream(System.out))
+          : new PrintWriter(new FileWriter(new File(filename), true));
+    }
+    catch (Exception e) {
+      throw new MetricsException("Error creating "+ filename, e);
+    }
+  }
+
+  @Override
+  public void putMetrics(MetricsRecord record) {
+    writer.print(record.timestamp());
+    writer.print(" ");
+    writer.print(record.context());
+    writer.print(".");
+    writer.print(record.name());
+    String separator = ": ";
+    for (MetricsTag tag : record.tags()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(tag.name());
+      writer.print("=");
+      writer.print(String.valueOf(tag.value()));
+    }
+    for (Metric metric : record.metrics()) {
+      writer.print(separator);
+      separator = ", ";
+      writer.print(metric.name());
+      writer.print("=");
+      writer.print(metric.value());
+    }
+    writer.println();
+  }
+
+  @Override
+  public void flush() {
+    writer.flush();
+  }
+
+}

+ 191 - 0
src/core/org/apache/hadoop/metrics2/source/JvmMetricsSource.java

@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.source;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import static java.lang.Thread.State.*;
+import java.lang.management.GarbageCollectorMXBean;
+import java.util.List;
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+
+import org.apache.hadoop.log.EventCounter;
+
+/**
+ * A metrics source for JVM memory, garbage collection, thread and logging
+ * event metrics.
+ */
+public class JvmMetricsSource implements MetricsSource {
+
+  private static final float M = 1024*1024;
+
+  static final String SOURCE_NAME = "jvm";
+  static final String CONTEXT = "jvm";
+  static final String RECORD_NAME = "metrics";
+  static final String SOURCE_DESC = "JVM metrics etc.";
+
+  // tags
+  static final String PROCESSNAME_KEY = "processName";
+  static final String PROCESSNAME_DESC = "Process name";
+  static final String SESSIONID_KEY = "sessionId";
+  static final String SESSIONID_DESC = "Session ID";
+  private final String processName, sessionId;
+
+  // metrics
+  static final String NONHEAP_USED_KEY = "memNonHeapUsedM";
+  static final String NONHEAP_USED_DESC = "Non-heap memory used in MB";
+  static final String NONHEAP_COMMITTED_KEY = "memNonHeapCommittedM";
+  static final String NONHEAP_COMMITTED_DESC = "Non-heap committed in MB";
+  static final String HEAP_USED_KEY = "memHeapUsedM";
+  static final String HEAP_USED_DESC = "Heap memory used in MB";
+  static final String HEAP_COMMITTED_KEY = "memHeapCommittedM";
+  static final String HEAP_COMMITTED_DESC = "Heap memory committed in MB";
+  static final String GC_COUNT_KEY = "gcCount";
+  static final String GC_COUNT_DESC = "Total GC count";
+  static final String GC_TIME_KEY = "gcTimeMillis";
+  static final String GC_TIME_DESC = "Total GC time in milliseconds";
+  static final String THREADS_NEW_KEY = "threadsNew";
+  static final String THREADS_NEW_DESC = "Number of new threads";
+  static final String THREADS_RUNNABLE_KEY = "threadsRunnable";
+  static final String THREADS_RUNNABLE_DESC = "Number of runnable threads";
+  static final String THREADS_BLOCKED_KEY = "threadsBlocked";
+  static final String THREADS_BLOCKED_DESC = "Number of blocked threads";
+  static final String THREADS_WAITING_KEY = "threadsWaiting";
+  static final String THREADS_WAITING_DESC = "Number of waiting threads";
+  static final String THREADS_TIMEDWAITING_KEY = "threadsTimedWaiting";
+  static final String THREADS_TIMEDWAITING_DESC =
+      "Number of timed waiting threads";
+  static final String THREADS_TERMINATED_KEY = "threadsTerminated";
+  static final String THREADS_TERMINATED_DESC = "Number of terminated threads";
+  static final String LOG_FATAL_KEY = "logFatal";
+  static final String LOG_FATAL_DESC = "Total number of fatal log events";
+  static final String LOG_ERROR_KEY = "logError";
+  static final String LOG_ERROR_DESC = "Total number of error log events";
+  static final String LOG_WARN_KEY = "logWarn";
+  static final String LOG_WARN_DESC = "Total number of warning log events";
+  static final String LOG_INFO_KEY = "logInfo";
+  static final String LOG_INFO_DESC = "Total number of info log events";
+
+  private final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+  private final List<GarbageCollectorMXBean> gcBeans =
+      ManagementFactory.getGarbageCollectorMXBeans();
+  private final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
+
+  JvmMetricsSource(String processName, String sessionId) {
+    this.processName = processName;
+    this.sessionId = sessionId;
+  }
+
+  public static JvmMetricsSource create(String processName, String sessionId,
+                                        MetricsSystem ms) {
+    return ms.register(SOURCE_NAME, SOURCE_DESC,
+                       new JvmMetricsSource(processName, sessionId));
+  }
+
+  public static JvmMetricsSource create(String processName, String sessionId) {
+    return create(processName, sessionId, DefaultMetricsSystem.INSTANCE);
+  }
+
+  @Override
+  public void getMetrics(MetricsBuilder builder, boolean all) {
+    MetricsRecordBuilder rb = builder.addRecord(RECORD_NAME)
+        .setContext(CONTEXT)
+        .tag(PROCESSNAME_KEY, PROCESSNAME_DESC, processName)
+        .tag(SESSIONID_KEY, SESSIONID_DESC, sessionId);
+    getMemoryUsage(rb);
+    getGcUsage(rb);
+    getThreadUsage(rb);
+    getEventCounters(rb);
+  }
+
+  private void getMemoryUsage(MetricsRecordBuilder rb) {
+    MemoryUsage memNonHeap = memoryMXBean.getNonHeapMemoryUsage();
+    MemoryUsage memHeap = memoryMXBean.getHeapMemoryUsage();
+    rb.addGauge(NONHEAP_USED_KEY, NONHEAP_USED_DESC, memNonHeap.getUsed() / M)
+      .addGauge(NONHEAP_COMMITTED_KEY, NONHEAP_COMMITTED_DESC,
+                memNonHeap.getCommitted() / M)
+      .addGauge(HEAP_USED_KEY, HEAP_USED_DESC, memHeap.getUsed() / M)
+      .addGauge(HEAP_COMMITTED_KEY, HEAP_COMMITTED_DESC,
+                memHeap.getCommitted() / M);
+  }
+
+  private void getGcUsage(MetricsRecordBuilder rb) {
+    long count = 0;
+    long timeMillis = 0;
+    for (GarbageCollectorMXBean gcBean : gcBeans) {
+      count += gcBean.getCollectionCount();
+      timeMillis += gcBean.getCollectionTime();
+    }
+    rb.addCounter(GC_COUNT_KEY, GC_COUNT_DESC, count)
+      .addCounter(GC_TIME_KEY, GC_TIME_DESC, timeMillis);
+  }
+
+  private void getThreadUsage(MetricsRecordBuilder rb) {
+    long threadIds[] = threadMXBean.getAllThreadIds();
+    ThreadInfo[] threadInfos = threadMXBean.getThreadInfo(threadIds, 0);
+    int threadsNew = 0;
+    int threadsRunnable = 0;
+    int threadsBlocked = 0;
+    int threadsWaiting = 0;
+    int threadsTimedWaiting = 0;
+    int threadsTerminated = 0;
+
+    for (ThreadInfo threadInfo : threadInfos) {
+      // threadInfo is null if the thread is not alive or doesn't exist
+      if (threadInfo == null) {
+        continue;
+      }
+      Thread.State state = threadInfo.getThreadState();
+      if (state == NEW) {
+        threadsNew++;
+      } else if (state == RUNNABLE) {
+        threadsRunnable++;
+      } else if (state == BLOCKED) {
+        threadsBlocked++;
+      } else if (state == WAITING) {
+        threadsWaiting++;
+      } else if (state == TIMED_WAITING) {
+        threadsTimedWaiting++;
+      } else if (state == TERMINATED) {
+        threadsTerminated++;
+      }
+    }
+    rb.addGauge(THREADS_NEW_KEY, THREADS_NEW_DESC, threadsNew)
+      .addGauge(THREADS_RUNNABLE_KEY, THREADS_RUNNABLE_DESC, threadsRunnable)
+      .addGauge(THREADS_BLOCKED_KEY, THREADS_BLOCKED_DESC, threadsBlocked)
+      .addGauge(THREADS_WAITING_KEY, THREADS_WAITING_DESC, threadsWaiting)
+      .addGauge(THREADS_TIMEDWAITING_KEY, THREADS_TIMEDWAITING_DESC,
+                threadsTimedWaiting)
+      .addGauge(THREADS_TERMINATED_KEY, THREADS_TERMINATED_DESC,
+                threadsTerminated);
+  }
+
+  private void getEventCounters(MetricsRecordBuilder rb) {
+    rb.addCounter(LOG_FATAL_KEY, LOG_FATAL_DESC, EventCounter.getFatal())
+      .addCounter(LOG_ERROR_KEY, LOG_ERROR_DESC, EventCounter.getError())
+      .addCounter(LOG_WARN_KEY, LOG_WARN_DESC, EventCounter.getWarn())
+      .addCounter(LOG_INFO_KEY, LOG_INFO_DESC, EventCounter.getInfo());
+  }
+
+}
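As a usage note (a hedged sketch, not part of this patch): a daemon would typically register the JVM source once at startup via the create() factory; the process name and session id below are placeholder values.

    import org.apache.hadoop.metrics2.source.JvmMetricsSource;

    public class JvmMetricsExample {
      public static void main(String[] args) {
        // Registers the "jvm" source with the default metrics system; the
        // configured sinks then receive a "metrics" record in the "jvm"
        // context on each snapshot.
        JvmMetricsSource.create("NameNode", "session-0");
      }
    }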

+ 127 - 0
src/core/org/apache/hadoop/metrics2/util/Contracts.java

@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+/**
+ * Utilities for programming by contract (preconditions, postconditions etc.)
+ */
+public class Contracts {
+
+  private Contracts() {}
+
+  /**
+   * Check that a reference is not null.
+   * @param <T> type of the reference
+   * @param ref the reference to check
+   * @param msg the error message
+   * @throws NullPointerException if {@code ref} is null
+   * @return the checked reference for convenience
+   */
+  public static <T> T checkNotNull(T ref, Object msg) {
+    if (ref == null) {
+      // ref is null here, so only the supplied message can be reported
+      throw new NullPointerException(String.valueOf(msg));
+    }
+    return ref;
+  }
+
+  /**
+   * Check the state expression for false conditions
+   * @param expression  the boolean expression to check
+   * @param msg the error message if {@code expression} is false
+   * @throws IllegalStateException if {@code expression} is false
+   */
+  public static void checkState(boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalStateException(String.valueOf(msg));
+    }
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param <T> type of the argument
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static <T> T checkArg(T arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static int checkArg(int arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg)+ ": "+arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static long checkArg(long arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static float checkArg(float arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+  /**
+   * Check an argument for false conditions
+   * @param arg the argument to check
+   * @param expression  the boolean expression for the condition
+   * @param msg the error message if {@code expression} is false
+   * @return the argument for convenience
+   */
+  public static double checkArg(double arg, boolean expression, Object msg) {
+    if (!expression) {
+      throw new IllegalArgumentException(String.valueOf(msg) +": "+ arg);
+    }
+    return arg;
+  }
+
+}
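A brief, hypothetical usage sketch of the precondition helpers above; the class, fields, and message strings are illustrative only.

    import org.apache.hadoop.metrics2.util.Contracts;

    public class ContractsExample {
      private final String name;
      private final int interval;

      public ContractsExample(String name, int interval) {
        // Each helper returns its argument, so checks can be inlined.
        this.name = Contracts.checkNotNull(name, "name");
        this.interval = Contracts.checkArg(interval, interval > 0,
                                           "interval must be positive");
      }

      public static void main(String[] args) {
        ContractsExample ok = new ContractsExample("file sink", 10);
        System.out.println(ok.name + " every " + ok.interval + "s");
        // new ContractsExample("file sink", 0) would throw IllegalArgumentException
      }
    }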

+ 25 - 23
src/core/org/apache/hadoop/metrics/util/MBeanUtil.java → src/core/org/apache/hadoop/metrics2/util/MBeans.java

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.metrics.util;
+package org.apache.hadoop.metrics2.util;
 
 import java.lang.management.ManagementFactory;
 
@@ -24,63 +24,65 @@ import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.management.InstanceAlreadyExistsException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 
 /**
  * This util class provides a method to register an MBean using
  * our standard naming convention as described in the doc
- *  for {link {@link #registerMBean(String, String, Object)}
+ *  for {@link #register(String, String, Object)}
  *
  */
-public class MBeanUtil {
-	
+public class MBeans {
+
+  private static final Log LOG = LogFactory.getLog(MBeans.class);
+
   /**
    * Register the MBean using our standard MBeanName format
    * "hadoop:service=<serviceName>,name=<nameName>"
    * Where the <serviceName> and <nameName> are the supplied parameters
-   *    
+   *
    * @param serviceName
    * @param nameName
    * @param theMbean - the MBean to register
    * @return the named used to register the MBean
-   */	
-  static public ObjectName registerMBean(final String serviceName, 
-		  							final String nameName,
-		  							final Object theMbean) {
+   */
+  static public ObjectName register(String serviceName, String nameName,
+                                    Object theMbean) {
     final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
     ObjectName name = getMBeanName(serviceName, nameName);
     try {
       mbs.registerMBean(theMbean, name);
       return name;
     } catch (InstanceAlreadyExistsException ie) {
-      // Ignore if instance already exists 
+      LOG.warn(name, ie);
     } catch (Exception e) {
-      e.printStackTrace();
+      LOG.warn("Error registering "+ name, e);
     }
     return null;
   }
-  
-  static public void unregisterMBean(ObjectName mbeanName) {
+
+  static public void unregister(ObjectName mbeanName) {
     final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-    if (mbeanName == null) 
+    if (mbeanName == null)
         return;
     try {
       mbs.unregisterMBean(mbeanName);
     } catch (InstanceNotFoundException e ) {
-      // ignore
+      LOG.warn(mbeanName, e);
     } catch (Exception e) {
-      e.printStackTrace();
-    } 
+      LOG.warn("Error unregistering "+ mbeanName, e);
+    }
   }
-  
-  static private ObjectName getMBeanName(final String serviceName,
-		  								 final String nameName) {
+
+  static private ObjectName getMBeanName(String serviceName, String nameName) {
     ObjectName name = null;
+    String nameStr = "Hadoop:service="+ serviceName +",name="+ nameName;
     try {
-      name = new ObjectName("hadoop:" +
-                  "service=" + serviceName + ",name=" + nameName);
+      name = new ObjectName(nameStr);
     } catch (MalformedObjectNameException e) {
-      e.printStackTrace();
+      LOG.warn("Error creating MBean object name: "+ nameStr, e);
     }
     return name;
   }
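A hypothetical registration sketch for the renamed helper; the service name, bean name, and MBean class are illustrative, following the standard JMX convention of a class paired with a matching *MBean interface.

    import javax.management.ObjectName;
    import org.apache.hadoop.metrics2.util.MBeans;

    public class MBeansExample {
      public interface ExampleMBean { int getValue(); }
      public static class Example implements ExampleMBean {
        public int getValue() { return 42; }
      }

      public static void main(String[] args) {
        // Registered as "Hadoop:service=ExampleService,name=Example".
        ObjectName name = MBeans.register("ExampleService", "Example",
                                          new Example());
        // ... later, e.g. on shutdown:
        MBeans.unregister(name);
      }
    }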

+ 167 - 0
src/core/org/apache/hadoop/metrics2/util/SampleStat.java

@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+/**
+ * Helper to compute running sample stats
+ */
+public class SampleStat {
+
+  private final MinMax minmax = new MinMax();
+  private long numSamples = 0;
+  private double a0, a1, s0, s1;
+
+  /**
+   * Construct a new running sample stat
+   */
+  public SampleStat() {
+    a0 = s0 = 0.0;
+  }
+
+  public void reset() {
+    numSamples = 0;
+    a0 = s0 = 0.0;
+    minmax.reset();
+  }
+
+  // We want to reuse the object, sometimes.
+  void reset(long numSamples, double a0, double a1, double s0, double s1,
+             MinMax minmax) {
+    this.numSamples = numSamples;
+    this.a0 = a0;
+    this.a1 = a1;
+    this.s0 = s0;
+    this.s1 = s1;
+    this.minmax.reset(minmax);
+  }
+
+  /**
+   * Copy the values to another SampleStat (avoids object creation and GC).
+   * @param other the destination to hold our values
+   */
+  public void copyTo(SampleStat other) {
+    other.reset(numSamples, a0, a1, s0, s1, minmax);
+  }
+
+  /**
+   * Add a sample to the running stat.
+   * @param x the sample value
+   * @return  self
+   */
+  public SampleStat add(double x) {
+    minmax.add(x);
+    return add(1, x);
+  }
+
+  /**
+   * Add a number of samples and their partial sum to the running stat.
+   * Note that min/max are not evaluated when using this method.
+   * @param nSamples  number of samples
+   * @param x the partial sum
+   * @return  self
+   */
+  public SampleStat add(long nSamples, double x) {
+    numSamples += nSamples;
+
+    if (numSamples == 1) {
+      a0 = a1 = x;
+      s0 = 0.0;
+    }
+    else {
+      // The Welford method for numerical stability
+      a1 = a0 + (x - a0) / numSamples;
+      s1 = s0 + (x - a0) * (x - a1);
+      a0 = a1;
+      s0 = s1;
+    }
+    return this;
+  }
+
+  /**
+   * @return  the total number of samples
+   */
+  public long numSamples() {
+    return numSamples;
+  }
+
+  /**
+   * @return  the arithmetic mean of the samples
+   */
+  public double mean() {
+    return numSamples > 0 ? a1 : 0.0;
+  }
+
+  /**
+   * @return  the variance of the samples
+   */
+  public double variance() {
+    return numSamples > 1 ? s1 / (numSamples - 1) : 0.0;
+  }
+
+  /**
+   * @return  the standard deviation of the samples
+   */
+  public double stddev() {
+    return Math.sqrt(variance());
+  }
+
+  /**
+   * @return  the minimum value of the samples
+   */
+  public double min() {
+    return minmax.min();
+  }
+
+  /**
+   * @return  the maximum value of the samples
+   */
+  public double max() {
+    return minmax.max();
+  }
+
+  /**
+   * Helper to keep running min/max
+   */
+  @SuppressWarnings("PublicInnerClass")
+  public static class MinMax {
+
+    private double min = Double.MAX_VALUE;
+    private double max = -Double.MAX_VALUE; // MIN_VALUE is the smallest positive double
+
+    public void add(double value) {
+      if (value > max) max = value;
+      if (value < min) min = value;
+    }
+
+    public double min() { return min; }
+    public double max() { return max; }
+
+    public void reset() {
+      min = Double.MAX_VALUE;
+      max = -Double.MAX_VALUE;
+    }
+
+    public void reset(MinMax other) {
+      min = other.min();
+      max = other.max();
+    }
+
+  }
+
+}
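A small usage sketch of the running-stat helper above; the sample values are arbitrary.

    import org.apache.hadoop.metrics2.util.SampleStat;

    public class SampleStatExample {
      public static void main(String[] args) {
        SampleStat stat = new SampleStat();
        // Feed a few latency samples (milliseconds); the Welford update keeps
        // the mean and variance numerically stable.
        for (double ms : new double[] {12, 15, 11, 30, 14}) {
          stat.add(ms);
        }
        System.out.println("n=" + stat.numSamples()
            + " mean=" + stat.mean()
            + " stddev=" + stat.stddev()
            + " min=" + stat.min()
            + " max=" + stat.max());
      }
    }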

+ 120 - 0
src/core/org/apache/hadoop/metrics2/util/TryIterator.java

@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.util;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+
+/**
+ * A base class for unmodifiable iterators (throws on remove)
+ *
+ * This class also makes writing filtering iterators easier, where the only
+ * way to discover the end of data is by trying to read it. The same applies
+ * to writing iterator wrappers around stream read calls.
+ *
+ * One only needs to implement the tryNext() method and call done() when done.
+ *
+ * @param <T> the type of the iterator
+ */
+public abstract class TryIterator<T> implements Iterator<T> {
+
+  enum State {
+    PENDING,  // Ready to tryNext().
+    GOT_NEXT, // Got the next element from tryNext() and yet to return it.
+    DONE,     // Done/finished.
+    FAILED,   // An exception occurred in the last op.
+  }
+
+  private State state = State.PENDING;
+  private T next;
+
+  /**
+   * Return the next element. Implementations must call {@link #done()} when
+   * done, otherwise an infinite loop could occur. If this method throws an
+   * exception, any further attempt to use the iterator will result in an
+   * {@link IllegalStateException}.
+   *
+   * @return the next element if there is one or return {@link #done()}
+   */
+  protected abstract T tryNext();
+
+  /**
+   * Implementations of {@link #tryNext} <b>must</b> call this method
+   * when there are no more elements left in the iteration.
+   *
+   * @return  null as a convenience to implement {@link #tryNext()}
+   */
+  protected final T done() {
+    state = State.DONE;
+    return null;
+  }
+
+  /**
+   * @return  true if we have a next element or false otherwise.
+   */
+  public final boolean hasNext() {
+    if (state == State.FAILED)
+      throw new IllegalStateException();
+
+    switch (state) {
+      case DONE:      return false;
+      case GOT_NEXT:  return true;
+      default:
+    }
+
+    // handle tryNext
+    state = State.FAILED; // just in case
+    next = tryNext();
+
+    if (state != State.DONE) {
+      state = State.GOT_NEXT;
+      return true;
+    }
+    return false;
+  }
+
+  /**
+   * @return  the next element if we have one.
+   */
+  public final T next() {
+    if (!hasNext()) {
+      throw new NoSuchElementException();
+    }
+    state = State.PENDING;
+    return next;
+  }
+
+  /**
+   * @return the current element without advancing the iterator
+   */
+  public final T current() {
+    if (!hasNext()) {
+      throw new NoSuchElementException();
+    }
+    return next;
+  }
+
+  /**
+   * Guaranteed to throw UnsupportedOperationException
+   */
+  public final void remove() {
+    throw new UnsupportedOperationException("Not allowed.");
+  }
+
+}
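A sketch of a filtering iterator built on the base class above; the wrapped iterator and the even-number filter are illustrative.

    import java.util.Arrays;
    import java.util.Iterator;
    import org.apache.hadoop.metrics2.util.TryIterator;

    public class EvenIterator extends TryIterator<Integer> {
      private final Iterator<Integer> it;

      public EvenIterator(Iterator<Integer> it) {
        this.it = it;
      }

      @Override
      protected Integer tryNext() {
        // Pull from the underlying iterator until an even number is found;
        // signal the end of data with done().
        while (it.hasNext()) {
          Integer n = it.next();
          if (n % 2 == 0) {
            return n;
          }
        }
        return done();
      }

      public static void main(String[] args) {
        Iterator<Integer> evens =
            new EvenIterator(Arrays.asList(1, 2, 3, 4, 5, 6).iterator());
        while (evens.hasNext()) {
          System.out.println(evens.next()); // prints 2, 4, 6
        }
      }
    }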

+ 56 - 0
src/core/org/apache/hadoop/security/UgiInstrumentation.java

@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.security;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.MetricsBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricMutableStat;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+
+class UgiInstrumentation implements MetricsSource {
+
+  final MetricsRegistry registry = new MetricsRegistry("ugi");
+  final MetricMutableStat loginSuccess = registry.newStat("loginSuccess");
+  final MetricMutableStat loginFailure = registry.newStat("loginFailure");
+
+  @Override
+  public void getMetrics(MetricsBuilder builder, boolean all) {
+    registry.snapshot(builder.addRecord(registry.name()), all);
+  }
+
+  void addLoginSuccess(long elapsed) {
+    loginSuccess.add(elapsed);
+  }
+
+  void addLoginFailure(long elapsed) {
+    loginFailure.add(elapsed);
+  }
+
+  static UgiInstrumentation create(Configuration conf) {
+    return create(conf, DefaultMetricsSystem.INSTANCE);
+  }
+
+  static UgiInstrumentation create(Configuration conf, MetricsSystem ms) {
+    return ms.register("ugi", "User/group metrics", new UgiInstrumentation());
+  }
+
+}

+ 8 - 49
src/core/org/apache/hadoop/security/UserGroupInformation.java

@@ -50,13 +50,6 @@ import javax.security.auth.spi.LoginModule;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.util.MetricsBase;
-import org.apache.hadoop.metrics.util.MetricsRegistry;
-import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -79,41 +72,6 @@ public class UserGroupInformation {
    */
   private static final float TICKET_RENEW_WINDOW = 0.80f;
   
-  /** 
-   * UgiMetrics maintains UGI activity statistics
-   * and publishes them through the metrics interfaces.
-   */
-  static class UgiMetrics implements Updater {
-    final MetricsTimeVaryingRate loginSuccess;
-    final MetricsTimeVaryingRate loginFailure;
-    private final MetricsRecord metricsRecord;
-    private final MetricsRegistry registry;
-
-    UgiMetrics() {
-      registry = new MetricsRegistry();
-      loginSuccess = new MetricsTimeVaryingRate("loginSuccess", registry,
-          "Rate of successful kerberos logins and time taken in milliseconds");
-      loginFailure = new MetricsTimeVaryingRate("loginFailure", registry,
-          "Rate of failed kerberos logins and time taken in milliseconds");
-      final MetricsContext metricsContext = MetricsUtil.getContext("ugi");
-      metricsRecord = MetricsUtil.createRecord(metricsContext, "ugi");
-      metricsContext.registerUpdater(this);
-    }
-
-    /**
-     * Push the metrics to the monitoring subsystem on doUpdate() call.
-     */
-    @Override
-    public void doUpdates(final MetricsContext context) {
-      synchronized (this) {
-        for (MetricsBase m : registry.getMetricsList()) {
-          m.pushMetric(metricsRecord);
-        }
-      }
-      metricsRecord.update();
-    }
-  }
-  
   /**
    * A login module that looks at the Kerberos, Unix, or Windows principal and
    * adds the corresponding UserName.
@@ -175,7 +133,7 @@ public class UserGroupInformation {
   }
 
   /** Metrics to track UGI activity */
-  static UgiMetrics metrics = new UgiMetrics();
+  static UgiInstrumentation metrics;
   /** Are the static variables that depend on configuration initialized? */
   private static boolean isInitialized = false;
   /** Should we use Kerberos configuration? */
@@ -235,6 +193,7 @@ public class UserGroupInformation {
     }
     isInitialized = true;
     UserGroupInformation.conf = conf;
+    metrics = UgiInstrumentation.create(conf);
   }
 
   /**
@@ -583,13 +542,13 @@ public class UserGroupInformation {
         new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject);
       start = System.currentTimeMillis();
       login.login();
-      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+      metrics.addLoginSuccess(System.currentTimeMillis() - start);
       loginUser = new UserGroupInformation(subject);
       loginUser.setLogin(login);
       loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.addLoginFailure(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
@@ -667,7 +626,7 @@ public class UserGroupInformation {
        
       start = System.currentTimeMillis();
       login.login();
-      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+      metrics.addLoginSuccess(System.currentTimeMillis() - start);
       UserGroupInformation newLoginUser = new UserGroupInformation(subject);
       newLoginUser.setLogin(login);
       newLoginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
@@ -675,7 +634,7 @@ public class UserGroupInformation {
       return newLoginUser;
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.addLoginFailure(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + user + " from keytab " + 
                             path, le);
@@ -722,11 +681,11 @@ public class UserGroupInformation {
       LOG.info("Initiating re-login for " + keytabPrincipal);
       start = System.currentTimeMillis();
       login.login();
-      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
+      metrics.addLoginSuccess(System.currentTimeMillis() - start);
       setLogin(login);
     } catch (LoginException le) {
       if (start > 0) {
-        metrics.loginFailure.inc(System.currentTimeMillis() - start);
+        metrics.addLoginFailure(System.currentTimeMillis() - start);
       }
       throw new IOException("Login failure for " + keytabPrincipal + 
           " from keytab " + keytabFile, le);

+ 24 - 1
src/core/org/apache/hadoop/util/StringUtils.java

@@ -695,9 +695,32 @@ public class StringUtils {
    *
    * @param separator Separator to join with.
    * @param strings Strings to join.
+   * @return  the joined string
    */
   public static String join(CharSequence separator, Iterable<String> strings) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
+    boolean first = true;
+    for (String s : strings) {
+      if (first) {
+        first = false;
+      } else {
+        sb.append(separator);
+      }
+      sb.append(s);
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Concatenates strings, using a separator.
+   *
+   * @param separator to join with
+   * @param strings to join
+   * @return  the joined string
+   */
+  public static String join(CharSequence separator, String[] strings) {
+    // Arrays are not Iterable, so the loop from the overload above is duplicated.
+    StringBuilder sb = new StringBuilder();
     boolean first = true;
     for (String s : strings) {
       if (first) {

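A quick usage sketch of the new array overload of join; the values are illustrative.

    import org.apache.hadoop.util.StringUtils;

    public class JoinExample {
      public static void main(String[] args) {
        String joined =
            StringUtils.join(", ", new String[] {"alpha", "beta", "gamma"});
        System.out.println(joined); // prints: alpha, beta, gamma
      }
    }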
+ 4 - 4
src/hdfs/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java

@@ -469,7 +469,7 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
           } else {
             checksumOut.write(pktBuf, checksumOff, checksumLen);
           }
-          datanode.myMetrics.bytesWritten.inc(len);
+          datanode.myMetrics.incrBytesWritten(len);
         }
       } catch (IOException iex) {
         datanode.checkDiskError(iex);
@@ -553,7 +553,7 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
         // Finalize the block. Does this fsync()?
         block.setNumBytes(offsetInBlock);
         datanode.data.finalizeBlock(block);
-        datanode.myMetrics.blocksWritten.inc();
+        datanode.myMetrics.incrBlocksWritten();
       }
 
     } catch (IOException ioe) {
@@ -810,7 +810,7 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
                 final long endTime = ClientTraceLog.isInfoEnabled() ? System.nanoTime() : 0;
                 block.setNumBytes(receiver.offsetInBlock);
                 datanode.data.finalizeBlock(block);
-                datanode.myMetrics.blocksWritten.inc();
+                datanode.myMetrics.incrBlocksWritten();
                 datanode.notifyNamenodeReceivedBlock(block, 
                     DataNode.EMPTY_DEL_HINT);
                 if (ClientTraceLog.isInfoEnabled() &&
@@ -944,7 +944,7 @@ class BlockReceiver implements java.io.Closeable, FSConstants {
               final long endTime = ClientTraceLog.isInfoEnabled() ? System.nanoTime() : 0;
               block.setNumBytes(receiver.offsetInBlock);
               datanode.data.finalizeBlock(block);
-              datanode.myMetrics.blocksWritten.inc();
+              datanode.myMetrics.incrBlocksWritten();
               datanode.notifyNamenodeReceivedBlock(block, 
                   DataNode.EMPTY_DEL_HINT);
               if (ClientTraceLog.isInfoEnabled() &&

Some files were not shown because too many files changed in this diff