
HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1 -- reversing this patch

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@810714 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack, 15 years ago (commit e5fef21b18)

CHANGES.txt  +0 -3

@@ -515,9 +515,6 @@ Trunk (unreleased changes)
     HADOOP-6224. Add a method to WritableUtils performing a bounded read of an
     encoded String. (Jothi Padmanabhan via cdouglas)
 
-    HADOOP-4675 Current Ganglia metrics implementation is incompatible with Ganglia 3.1
-    (Brian Brockelman, Scott Beardsley via stack)
-
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a

conf/hadoop-metrics.properties  +1 -5

@@ -7,9 +7,7 @@ dfs.class=org.apache.hadoop.metrics.spi.NullContext
 #dfs.fileName=/tmp/dfsmetrics.log
 
 # Configuration of the "dfs" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # dfs.period=10
 # dfs.servers=localhost:8649
 
@@ -23,15 +21,13 @@ mapred.class=org.apache.hadoop.metrics.spi.NullContext
 #mapred.fileName=/tmp/mrmetrics.log
 
 # Configuration of the "mapred" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
 # mapred.period=10
 # mapred.servers=localhost:8649
 
 
 # Configuration of the "jvm" context for null
-#jvm.class=org.apache.hadoop.metrics.spi.NullContext
+jvm.class=org.apache.hadoop.metrics.spi.NullContext
 
 # Configuration of the "jvm" context for file
 #jvm.class=org.apache.hadoop.metrics.file.FileContext
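
With GangliaContext31 removed by this commit, only the Ganglia 3.0 wire-format context is left to point at. A minimal sketch of enabling it for the "dfs" context, reusing the period and server address already shown in the commented examples above (a gmond listening on localhost:8649 is an assumption of the sketch, not something this commit configures), is just three uncommented lines:

  dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
  dfs.period=10
  dfs.servers=localhost:8649

The "mapred" context follows the same pattern; the removed "Pick one" comments no longer apply because only one ganglia context class remains after the revert.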

src/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java  +11 -11

@@ -71,16 +71,16 @@ public class GangliaContext extends AbstractMetricsContext {
     typeTable.put(Float.class, "float");
   }
     
-  protected byte[] buffer = new byte[BUFFER_SIZE];
-  protected int offset;
+  private byte[] buffer = new byte[BUFFER_SIZE];
+  private int offset;
     
-  protected List<? extends SocketAddress> metricsServers;
+  private List<? extends SocketAddress> metricsServers;
   private Map<String,String> unitsTable;
   private Map<String,String> slopeTable;
   private Map<String,String> tmaxTable;
   private Map<String,String> dmaxTable;
     
-  protected DatagramSocket datagramSocket;
+  private DatagramSocket datagramSocket;
     
   /** Creates a new instance of GangliaContext */
   public GangliaContext() {
@@ -132,7 +132,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }
     
-  protected void emitMetric(String name, String type,  String value) 
+  private void emitMetric(String name, String type,  String value) 
   throws IOException {
     String units = getUnits(name);
     int slope = getSlope(name);
@@ -156,7 +156,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }
     
-  protected String getUnits(String metricName) {
+  private String getUnits(String metricName) {
     String result = unitsTable.get(metricName);
     if (result == null) {
       result = DEFAULT_UNITS;
@@ -164,7 +164,7 @@ public class GangliaContext extends AbstractMetricsContext {
     return result;
   }
     
-  protected int getSlope(String metricName) {
+  private int getSlope(String metricName) {
     String slopeString = slopeTable.get(metricName);
     if (slopeString == null) {
       slopeString = DEFAULT_SLOPE; 
@@ -172,7 +172,7 @@ public class GangliaContext extends AbstractMetricsContext {
     return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
   }
     
-  protected int getTmax(String metricName) {
+  private int getTmax(String metricName) {
     if (tmaxTable == null) {
       return DEFAULT_TMAX;
     }
@@ -185,7 +185,7 @@ public class GangliaContext extends AbstractMetricsContext {
     }
   }
     
-  protected int getDmax(String metricName) {
+  private int getDmax(String metricName) {
     String dmaxString = dmaxTable.get(metricName);
     if (dmaxString == null) {
       return DEFAULT_DMAX;
@@ -200,7 +200,7 @@ public class GangliaContext extends AbstractMetricsContext {
    * as an int, followed by the bytes of the string, padded if necessary to
    * a multiple of 4.
    */
-  protected void xdr_string(String s) {
+  private void xdr_string(String s) {
     byte[] bytes = s.getBytes();
     int len = bytes.length;
     xdr_int(len);
@@ -222,7 +222,7 @@ public class GangliaContext extends AbstractMetricsContext {
   /**
    * Puts an integer into the buffer as 4 bytes, big-endian.
    */
-  protected void xdr_int(int i) {
+  private void xdr_int(int i) {
     buffer[offset++] = (byte)((i >> 24) & 0xff);
     buffer[offset++] = (byte)((i >> 16) & 0xff);
     buffer[offset++] = (byte)((i >> 8) & 0xff);

src/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java  +0 -144

@@ -1,144 +0,0 @@
-/*
- * GangliaContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.ganglia;
-
-import java.io.IOException;
-import java.net.DatagramPacket;
-import java.net.SocketAddress;
-import java.net.UnknownHostException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.net.DNS;
-
-/**
- * Context for sending metrics to Ganglia version 3.1.x.
- * 
- * 3.1.1 has a slightly different wire protocol compared to 3.0.x.
- */
-public class GangliaContext31 extends GangliaContext {
-
-  String hostName = "UNKNOWN.example.com";
-
-  private static final Log LOG = 
-    LogFactory.getLog("org.apache.hadoop.util.GangliaContext31");
-
-  public void init(String contextName, ContextFactory factory) {
-    super.init(contextName, factory);
-
-    LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");
-
-    // Take the hostname from the DNS class.
-
-    Configuration conf = new Configuration();
-
-    if (conf.get("slave.host.name") != null) {
-      hostName = conf.get("slave.host.name");
-    } else {
-      try {
-        hostName = DNS.getDefaultHost(
-          conf.get("dfs.datanode.dns.interface","default"),
-          conf.get("dfs.datanode.dns.nameserver","default"));
-      } catch (UnknownHostException uhe) {
-        LOG.error(uhe);
-    	hostName = "UNKNOWN.example.com";
-      }
-    }
-  }
-
-  protected void emitMetric(String name, String type,  String value) 
-    throws IOException
-  {
-    if (name == null) {
-      LOG.warn("Metric was emitted with no name.");
-      return;
-    } else if (value == null) {
-      LOG.warn("Metric name " + name +" was emitted with a null value.");
-      return;
-    } else if (type == null) {
-      LOG.warn("Metric name " + name + ", value " + value + " has no type.");
-      return;
-    }
-
-    LOG.debug("Emitting metric " + name + ", type " + type + ", value " + 
-      value + " from hostname " + hostName);
-
-    String units = getUnits(name);
-    if (units == null) {
-      LOG.warn("Metric name " + name + ", value " + value
-        + " had 'null' units");
-      units = "";
-    }
-    int slope = getSlope(name);
-    int tmax = getTmax(name);
-    int dmax = getDmax(name);
-    offset = 0;
-    String groupName = name.substring(0,name.lastIndexOf("."));
-
-    // The following XDR recipe was done through a careful reading of
-    // gm_protocol.x in Ganglia 3.1 and carefully examining the output of
-    // the gmetric utility with strace.
-
-    // First we send out a metadata message
-    xdr_int(128);         // metric_id = metadata_msg
-    xdr_string(hostName); // hostname
-    xdr_string(name);     // metric name
-    xdr_int(0);           // spoof = False
-    xdr_string(type);     // metric type
-    xdr_string(name);     // metric name
-    xdr_string(units);    // units
-    xdr_int(slope);       // slope
-    xdr_int(tmax);        // tmax, the maximum time between metrics
-    xdr_int(dmax);        // dmax, the maximum data value
-
-    xdr_int(1);             /*Num of the entries in extra_value field for 
-                              Ganglia 3.1.x*/
-    xdr_string("GROUP");    /*Group attribute*/
-    xdr_string(groupName);  /*Group value*/
-
-    for (SocketAddress socketAddress : metricsServers) {
-      DatagramPacket packet =
-        new DatagramPacket(buffer, offset, socketAddress);
-      datagramSocket.send(packet);
-    }
-
-    // Now we send out a message with the actual value.
-    // Technically, we only need to send out the metadata message once for
-    // each metric, but I don't want to have to record which metrics we did and
-    // did not send.
-    offset = 0;
-    xdr_int(133);         // we are sending a string value
-    xdr_string(hostName); // hostName
-    xdr_string(name);     // metric name
-    xdr_int(0);           // spoof = False
-    xdr_string("%s");     // format field
-    xdr_string(value);    // metric value
-        
-    for (SocketAddress socketAddress : metricsServers) {
-      DatagramPacket packet = 
-        new DatagramPacket(buffer, offset, socketAddress);
-      datagramSocket.send(packet);
-    }
-  }
-
-}