Pārlūkot izejas kodu

AMBARI-14051 Remove httpclient library dep from AbstractTimelineMetricsSink to respect timeout settings (dsen)

Dmytro Sen 9 gadi atpakaļ
vecāks
revīzija
689cfea16f
13 mainīti faili ar 95 papildinājumiem un 100 dzēšanām
  1. 13 7
      ambari-metrics/ambari-metrics-common/pom.xml
  2. 21 17
      ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
  3. 30 18
      ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java
  4. 1 4
      ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
  5. 10 6
      ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
  6. 0 4
      ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
  7. 9 10
      ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
  8. 1 1
      ambari-metrics/ambari-metrics-kafka-sink/pom.xml
  9. 0 6
      ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java
  10. 6 8
      ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java
  11. 0 5
      ambari-metrics/ambari-metrics-storm-sink/pom.xml
  12. 0 3
      ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java
  13. 4 11
      ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java

+ 13 - 7
ambari-metrics/ambari-metrics-common/pom.xml

@@ -62,11 +62,6 @@
       <artifactId>commons-logging</artifactId>
       <version>1.1.1</version>
     </dependency>
-    <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-    </dependency>
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-xc</artifactId>
@@ -89,8 +84,19 @@
       <version>4.10</version>
     </dependency>
     <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.2</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>

+ 21 - 17
ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.metrics2.sink.timeline;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
-import org.apache.commons.httpclient.methods.StringRequestEntity;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.map.AnnotationIntrospector;
@@ -28,7 +25,9 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
 
 import java.io.IOException;
-import java.net.ConnectException;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
 
 public abstract class AbstractTimelineMetricsSink {
   public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix.";
@@ -40,7 +39,6 @@ public abstract class AbstractTimelineMetricsSink {
   public static final int DEFAULT_POST_TIMEOUT_SECONDS = 10;
 
   protected final Log LOG;
-  private HttpClient httpClient = new HttpClient();
 
   protected static ObjectMapper mapper;
 
@@ -54,20 +52,30 @@ public abstract class AbstractTimelineMetricsSink {
 
   public AbstractTimelineMetricsSink() {
     LOG = LogFactory.getLog(this.getClass());
-    httpClient.getParams().setSoTimeout(getTimeoutSeconds() * 1000);
-    httpClient.getParams().setConnectionManagerTimeout(getTimeoutSeconds() * 1000);
   }
 
-  protected void emitMetrics(TimelineMetrics metrics) throws IOException {
+  protected void emitMetrics(TimelineMetrics metrics) {
     String connectUrl = getCollectorUri();
+    int timeout = getTimeoutSeconds() * 1000;
     try {
       String jsonData = mapper.writeValueAsString(metrics);
 
-      StringRequestEntity requestEntity = new StringRequestEntity(jsonData, "application/json", "UTF-8");
+      HttpURLConnection connection =
+        (HttpURLConnection) new URL(connectUrl).openConnection();
 
-      PostMethod postMethod = new PostMethod(connectUrl);
-      postMethod.setRequestEntity(requestEntity);
-      int statusCode = httpClient.executeMethod(postMethod);
+      connection.setRequestMethod("POST");
+      connection.setRequestProperty("Content-Type", "application/json");
+      connection.setConnectTimeout(timeout);
+      connection.setReadTimeout(timeout);
+      connection.setDoOutput(true);
+
+      if (jsonData != null) {
+        try (OutputStream os = connection.getOutputStream()) {
+          os.write(jsonData.getBytes("UTF-8"));
+        }
+      }
+
+      int statusCode = connection.getResponseCode();
 
       if (statusCode != 200) {
         LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
@@ -75,15 +83,11 @@ public abstract class AbstractTimelineMetricsSink {
       } else {
         LOG.debug("Metrics posted to Collector " + connectUrl);
       }
-    } catch (ConnectException e) {
+    } catch (IOException e) {
       throw new UnableToConnectException(e).setConnectUrl(connectUrl);
     }
   }
 
-  public void setHttpClient(HttpClient httpClient) {
-    this.httpClient = httpClient;
-  }
-
   abstract protected String getCollectorUri();
 
   abstract protected int getTimeoutSeconds();

+ 30 - 18
ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/cache/HandleConnectExceptionTest.java

@@ -18,12 +18,10 @@
 package org.apache.hadoop.metrics2.sink.timeline.cache;
 
 import java.io.IOException;
-import java.net.ConnectException;
-import java.net.InetSocketAddress;
-import java.net.SocketAddress;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethod;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
@@ -31,27 +29,40 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
-@RunWith(MockitoJUnitRunner.class)
+import static org.easymock.EasyMock.expect;
+import static org.powermock.api.easymock.PowerMock.createNiceMock;
+import static org.powermock.api.easymock.PowerMock.expectNew;
+import static org.powermock.api.easymock.PowerMock.replayAll;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({AbstractTimelineMetricsSink.class, URL.class,
+  HttpURLConnection.class})
 public class HandleConnectExceptionTest {
   private static final String COLLECTOR_URL = "collector";
-  @Mock private HttpClient client;
   private TestTimelineMetricsSink sink;
   
-  @Before public void init(){
+  @Before
+  public void init(){
     sink = new TestTimelineMetricsSink();
-    sink.setHttpClient(client);
-    
+    OutputStream os = createNiceMock(OutputStream.class);
+    HttpURLConnection connection = createNiceMock(HttpURLConnection.class);
+    URL url = createNiceMock(URL.class);
+
     try {
-      Mockito.when(client.executeMethod(Mockito.<HttpMethod>any())).thenThrow(new ConnectException());
-    } catch (IOException e) {
+      expectNew(URL.class, "collector").andReturn(url);
+      expect(url.openConnection()).andReturn(connection).once();
+      expect(connection.getOutputStream()).andReturn(os).once();
+      expect(connection.getResponseCode()).andThrow(new IOException());
+
+      replayAll();
+    } catch (Exception e) {
       //no-op
     }
-  } 
-  
+  }
+
   @Test
   public void handleTest(){
     try{
@@ -63,6 +74,7 @@ public class HandleConnectExceptionTest {
       Assert.fail(e.getMessage());
     }
   }
+
   class TestTimelineMetricsSink extends AbstractTimelineMetricsSink{
     @Override
     protected String getCollectorUri() {
@@ -75,7 +87,7 @@ public class HandleConnectExceptionTest {
     }
 
     @Override
-    public void emitMetrics(TimelineMetrics metrics) throws IOException {
+    public void emitMetrics(TimelineMetrics metrics) {
       super.emitMetrics(metrics);
     }
   }

+ 1 - 4
ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java

@@ -30,7 +30,6 @@ import static org.powermock.api.easymock.PowerMock.verifyAll;
 import java.net.InetAddress;
 import java.util.Collections;
 
-import org.apache.commons.httpclient.HttpClient;
 import org.apache.flume.Context;
 import org.apache.flume.instrumentation.util.JMXPollUtil;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
@@ -87,14 +86,12 @@ public class FlumeTimelineMetricsSinkTest {
     FlumeTimelineMetricsSink flumeTimelineMetricsSink = new FlumeTimelineMetricsSink();
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(flumeTimelineMetricsSink);
     flumeTimelineMetricsSink.setPollFrequency(1);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    flumeTimelineMetricsSink.setHttpClient(httpClient);
     mockStatic(JMXPollUtil.class);
     EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
         Collections.singletonMap("component1", Collections.singletonMap("key1", "42"))).once();
     flumeTimelineMetricsSink.start();
     flumeTimelineMetricsSink.stop();
-    replay(JMXPollUtil.class, timelineMetricsCache, httpClient);
+    replay(JMXPollUtil.class, timelineMetricsCache);
     flumeTimelineMetricsSink.start();
     Thread.sleep(5);
     flumeTimelineMetricsSink.stop();

+ 10 - 6
ambari-metrics/ambari-metrics-hadoop-sink/pom.xml

@@ -144,12 +144,6 @@ limitations under the License.
       <version>2.4.0</version>
       <scope>compile</scope>
     </dependency>
-    <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>3.1</version>
-      <scope>compile</scope>
-    </dependency>
     <dependency>
       <groupId>commons-codec</groupId>
       <artifactId>commons-codec</artifactId>
@@ -196,6 +190,16 @@ limitations under the License.
       <version>3.2</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-api-easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-module-junit4</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
 </project>

+ 0 - 4
ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.metrics2.sink.timeline;
 
-import java.io.IOException;
 import java.net.SocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -35,7 +34,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.AbstractMetric;
-import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;
@@ -212,8 +210,6 @@ public class HadoopTimelineMetricsSink extends AbstractTimelineMetricsSink imple
       }
     } catch (UnableToConnectException uce) {
       LOG.warn("Unable to send metrics to collector by address:" + uce.getConnectUrl());
-    } catch (IOException io) {
-      throw new MetricsException("Failed to putMetrics", io);
     }
   }
 

+ 9 - 10
ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java

@@ -31,24 +31,29 @@ import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
 
+import java.io.OutputStream;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.junit.Assert;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 
+@RunWith(PowerMockRunner.class)
 public class HadoopTimelineMetricsSinkTest {
 
   @Test
+  @PrepareForTest({URL.class, OutputStream.class})
   public void testPutMetrics() throws Exception {
     HadoopTimelineMetricsSink sink = new HadoopTimelineMetricsSink();
 
@@ -82,11 +87,6 @@ public class HadoopTimelineMetricsSinkTest {
       }
     }).once();
 
-
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-
-    expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once(); //metrics send only once due to caching
-
     AbstractMetric metric = createNiceMock(AbstractMetric.class);
     expect(metric.name()).andReturn("metricName").anyTimes();
     expect(metric.value()).andReturn(9.5687).anyTimes();
@@ -105,9 +105,8 @@ public class HadoopTimelineMetricsSinkTest {
     expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();
 
 
-    replay(conf, httpClient, record, metric);
+    replay(conf, record, metric);
 
-    sink.setHttpClient(httpClient);
     sink.init(conf);
 
     sink.putMetrics(record);
@@ -116,7 +115,7 @@ public class HadoopTimelineMetricsSinkTest {
 
     sink.putMetrics(record);
 
-    verify(conf, httpClient, record, metric);
+    verify(conf, record, metric);
   }
 
   @Test

+ 1 - 1
ambari-metrics/ambari-metrics-kafka-sink/pom.xml

@@ -43,7 +43,7 @@ limitations under the License.
               <goal>copy-dependencies</goal>
             </goals>
             <configuration>
-              <includeArtifactIds>commons-codec,commons-collections,commons-httpclient,commons-lang,commons-logging,jackson-core-asl,jackson-mapper-asl,jackson-xc</includeArtifactIds>
+              <includeArtifactIds>commons-codec,commons-collections,commons-lang,commons-logging,jackson-core-asl,jackson-mapper-asl,jackson-xc</includeArtifactIds>
               <outputDirectory>${project.build.directory}/lib</outputDirectory>
             </configuration>
           </execution>

+ 0 - 6
ambari-metrics/ambari-metrics-kafka-sink/src/main/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporter.java

@@ -43,15 +43,11 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -255,8 +251,6 @@ public class KafkaTimelineMetricsReporter extends AbstractTimelineMetricsSink
         timelineMetrics.setMetrics(metricsList);
         try {
           emitMetrics(timelineMetrics);
-        } catch (IOException e) {
-          LOG.error("Unexpected error", e);
         } catch (Throwable t) {
           LOG.error("Exception emitting metrics", t);
         }

+ 6 - 8
ambari-metrics/ambari-metrics-kafka-sink/src/test/java/org/apache/hadoop/metrics2/sink/kafka/KafkaTimelineMetricsReporterTest.java

@@ -28,7 +28,6 @@ import com.yammer.metrics.core.MetricsRegistry;
 import com.yammer.metrics.core.Timer;
 import junit.framework.Assert;
 import kafka.utils.VerifiableProperties;
-import org.apache.commons.httpclient.HttpClient;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.easymock.EasyMock;
@@ -38,6 +37,9 @@ import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.OutputStream;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -48,7 +50,7 @@ import static org.powermock.api.easymock.PowerMock.replay;
 import static org.powermock.api.easymock.PowerMock.verifyAll;
 
 @RunWith(PowerMockRunner.class)
-@PrepareForTest({ Metrics.class, HttpClient.class,
+@PrepareForTest({ Metrics.class, URL.class, OutputStream.class,
   KafkaTimelineMetricsReporter.TimelineScheduledReporter.class })
 @PowerMockIgnore({"javax.management.*", "org.apache.log4j.*", "org.slf4j.*"})
 public class KafkaTimelineMetricsReporterTest {
@@ -90,9 +92,7 @@ public class KafkaTimelineMetricsReporterTest {
     EasyMock.expect(Metrics.defaultRegistry()).andReturn(registry).times(2);
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(kafkaTimelineMetricsReporter);
     kafkaTimelineMetricsReporter.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    kafkaTimelineMetricsReporter.setHttpClient(httpClient);
-    replay(Metrics.class, httpClient, timelineMetricsCache);
+    replay(Metrics.class, timelineMetricsCache);
     kafkaTimelineMetricsReporter.init(props);
     kafkaTimelineMetricsReporter.stopReporter();
     verifyAll();
@@ -104,10 +104,8 @@ public class KafkaTimelineMetricsReporterTest {
     EasyMock.expect(Metrics.defaultRegistry()).andReturn(registry).times(2);
     TimelineMetricsCache timelineMetricsCache = getTimelineMetricsCache(kafkaTimelineMetricsReporter);
     kafkaTimelineMetricsReporter.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-    kafkaTimelineMetricsReporter.setHttpClient(httpClient);
 
-    replay(Metrics.class, httpClient, timelineMetricsCache);
+    replay(Metrics.class, timelineMetricsCache);
     kafkaTimelineMetricsReporter.init(props);
 
     Assert.assertTrue(kafkaTimelineMetricsReporter.isExcludedMetric("a.b.c"));

+ 0 - 5
ambari-metrics/ambari-metrics-storm-sink/pom.xml

@@ -102,7 +102,6 @@ limitations under the License.
               <include>org.codehaus.jackson:jackson-core-asl</include>
               <include>org.codehaus.jackson:jackson-xc</include>
               <include>org.apache.hadoop:hadoop-annotations</include>
-              <include>commons-httpclient:commons-httpclient</include>
               <include>commons-logging:commons-logging</include>
               <include>org.apache.commons:commons-lang3</include>
               <include>commons-codec:commons-codec</include>
@@ -113,10 +112,6 @@ limitations under the License.
               <pattern>org.apache.commons.logging</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.logging</shadedPattern>
             </relocation>
-            <relocation>
-              <pattern>org.apache.commons.httpclient</pattern>
-              <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.httpclient</shadedPattern>
-            </relocation>
             <relocation>
               <pattern>org.apache.hadoop.classification</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.hadoop.classification</shadedPattern>

+ 0 - 3
ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java

@@ -31,7 +31,6 @@ import org.apache.hadoop.metrics2.sink.timeline.UnableToConnectException;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.apache.hadoop.metrics2.sink.timeline.configuration.Configuration;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -107,8 +106,6 @@ public class StormTimelineMetricsSink extends AbstractTimelineMetricsSink implem
         emitMetrics(timelineMetrics);
       } catch (UnableToConnectException uce) {
         LOG.warn("Unable to send metrics to collector by address:" + uce.getConnectUrl());
-      } catch (IOException e) {
-        LOG.error("Unexpected error", e);
       }
     }
   }

+ 4 - 11
ambari-metrics/ambari-metrics-storm-sink/src/test/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSinkTest.java

@@ -29,8 +29,6 @@ import java.io.IOException;
 import java.net.SocketAddress;
 import java.util.Collections;
 
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.cache.TimelineMetricsCache;
 import org.junit.Test;
@@ -43,13 +41,11 @@ public class StormTimelineMetricsSinkTest {
     StormTimelineMetricsSink stormTimelineMetricsSink = new StormTimelineMetricsSink();
     TimelineMetricsCache timelineMetricsCache = createNiceMock(TimelineMetricsCache.class);
     stormTimelineMetricsSink.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-    stormTimelineMetricsSink.setHttpClient(httpClient);
-    replay(timelineMetricsCache, httpClient);
+    replay(timelineMetricsCache);
     stormTimelineMetricsSink.handleDataPoints(
         new IMetricsConsumer.TaskInfo("localhost", 1234, "testComponent", 42, 20000L, 60),
         Collections.singleton(new IMetricsConsumer.DataPoint("key1", "value1")));
-    verify(timelineMetricsCache, httpClient);
+    verify(timelineMetricsCache);
   }
 
   @Test
@@ -61,13 +57,10 @@ public class StormTimelineMetricsSinkTest {
     timelineMetricsCache.putTimelineMetric(anyObject(TimelineMetric.class));
     expectLastCall().once();
     stormTimelineMetricsSink.setMetricsCache(timelineMetricsCache);
-    HttpClient httpClient = createNiceMock(HttpClient.class);
-    stormTimelineMetricsSink.setHttpClient(httpClient);
-    expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once();
-    replay(timelineMetricsCache, httpClient);
+    replay(timelineMetricsCache);
     stormTimelineMetricsSink.handleDataPoints(
         new IMetricsConsumer.TaskInfo("localhost", 1234, "testComponent", 42, 20000L, 60),
         Collections.singleton(new IMetricsConsumer.DataPoint("key1", 42)));
-    verify(timelineMetricsCache, httpClient);
+    verify(timelineMetricsCache);
   }
 }