Kaynağa Gözat

AMBARI-13004. Fix Storm sink compile time deps with latest version updates. (swagle)

Siddharth Wagle 9 yıl önce
ebeveyn
işleme
0263993f69

+ 1 - 1
ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

@@ -20,13 +20,13 @@ package org.apache.hadoop.metrics2.sink.timeline;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.commons.httpclient.methods.StringRequestEntity;
-import org.apache.commons.httpclient.params.HttpMethodParams;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.codehaus.jackson.map.AnnotationIntrospector;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
 import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
+
 import java.io.IOException;
 import java.net.ConnectException;
 
 

+ 22 - 6
ambari-metrics/ambari-metrics-storm-sink/pom.xml

@@ -31,7 +31,8 @@ limitations under the License.
   <packaging>jar</packaging>
 
   <properties>
-    <storm.version>0.9.3.2.2.1.0-2340</storm.version>
+    <!--<storm.version>0.9.3.2.2.1.0-2340</storm.version>-->
+    <storm.version>0.10.0.2.3.2.0-2766</storm.version>
   </properties>
 
   <build>
@@ -103,7 +104,8 @@ limitations under the License.
               <include>org.apache.hadoop:hadoop-annotations</include>
               <include>commons-httpclient:commons-httpclient</include>
               <include>commons-logging:commons-logging</include>
-              <include>commons-lang:commons-lang</include>
+              <include>org.apache.commons:commons-lang3</include>
+              <include>commons-codec:commons-codec</include>
             </includes>
           </artifactSet>
           <relocations>
@@ -115,10 +117,6 @@ limitations under the License.
               <pattern>org.apache.commons.httpclient</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.httpclient</shadedPattern>
             </relocation>
-            <relocation>
-              <pattern>org.apache.commons.lang</pattern>
-              <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.lang</shadedPattern>
-            </relocation>
             <relocation>
               <pattern>org.apache.hadoop.classification</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.hadoop.classification</shadedPattern>
@@ -127,12 +125,30 @@ limitations under the License.
               <pattern>org.codehaus.jackson</pattern>
               <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.jackson</shadedPattern>
             </relocation>
+            <relocation>
+              <pattern>org.apache.commons.lang3</pattern>
+              <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.lang3</shadedPattern>
+            </relocation>
+            <relocation>
+              <pattern>org.apache.commons.codec</pattern>
+              <shadedPattern>org.apache.hadoop.metrics2.sink.relocated.commons.codec</shadedPattern>
+            </relocation>
           </relocations>
         </configuration>
       </plugin>
     </plugins>
   </build>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.3.2</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <version>1.8</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.storm</groupId>
       <artifactId>storm-core</artifactId>

+ 2 - 2
ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsReporter.java

@@ -24,8 +24,8 @@ import backtype.storm.generated.TopologySummary;
 import backtype.storm.metric.IClusterReporter;
 import backtype.storm.utils.NimbusClient;
 import backtype.storm.utils.Utils;
-import org.apache.commons.lang.ClassUtils;
-import org.apache.commons.lang.Validate;
+import org.apache.commons.lang3.ClassUtils;
+import org.apache.commons.lang3.Validate;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;

+ 2 - 2
ambari-metrics/ambari-metrics-storm-sink/src/main/java/org/apache/hadoop/metrics2/sink/storm/StormTimelineMetricsSink.java

@@ -22,8 +22,8 @@ import backtype.storm.metric.api.IMetricsConsumer;
 import backtype.storm.task.IErrorReporter;
 import backtype.storm.task.TopologyContext;
 
-import org.apache.commons.lang.ClassUtils;
-import org.apache.commons.lang.math.NumberUtils;
+import org.apache.commons.lang3.ClassUtils;
+import org.apache.commons.lang3.math.NumberUtils;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;