
HADOOP-7264. Bump avro version to at least 1.4.1. Contributed by Alejandro Abdelnur

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1159699 13f79535-47bb-0310-9956-ffa450edef68
Thomas White, 13 years ago
Parent commit: 6ee5a73e0e

+ 3 - 0
hadoop-common/CHANGES.txt

@@ -328,6 +328,9 @@ Trunk (unreleased changes)
 
     HADOOP-7555. Add a eclipse-generated files to .gitignore. (atm)
 
+    HADOOP-7264. Bump avro version to at least 1.4.1. (Alejandro Abdelnur via
+    tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole

+ 23 - 19
hadoop-common/pom.xml

@@ -219,10 +219,15 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <scope>compile</scope>
+    </dependency>
     <dependency>
       <groupId>net.sf.kosmosfs</groupId>
       <artifactId>kfs</artifactId>
@@ -322,6 +327,23 @@
           <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>generate-avro-test-sources</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>schema</goal>
+              <goal>protocol</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
@@ -359,24 +381,6 @@
                 <recordcc destdir="${project.build.directory}/generated-test-sources/java">
                   <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
                 </recordcc>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avsc"/>
-                  </fileset>
-                </schema>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avpr"/>
-                  </fileset>
-                </schema>
               </target>
             </configuration>
           </execution>

+ 3 - 2
hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java

@@ -28,6 +28,7 @@ import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configured;
@@ -93,7 +94,7 @@ public abstract class AvroSerialization<T> extends Configured
     @Override
     public void open(OutputStream out) throws IOException {
       outStream = out;
-      encoder = new BinaryEncoder(out);
+      encoder = EncoderFactory.get().binaryEncoder(out, encoder);
     }
 
     @Override
@@ -127,7 +128,7 @@ public abstract class AvroSerialization<T> extends Configured
     @Override
     public void open(InputStream in) throws IOException {
       inStream = in;
-      decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
+      decoder = DecoderFactory.get().binaryDecoder(in, decoder);
     }
 
   }
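Context for the change above (not part of the patch): with Avro 1.5 the public BinaryEncoder constructor and DecoderFactory.defaultFactory() give way to the EncoderFactory/DecoderFactory singletons, whose second argument lets a previously created encoder or decoder be reused, which is what the new open() implementations do. A minimal round-trip sketch under those assumed 1.5 APIs; the string schema and helper class here are illustrative only:

    // Illustrative sketch only; not part of this patch.
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.util.Utf8;

    public class AvroRoundTripSketch {
      static Utf8 roundTrip(Utf8 value) throws IOException {
        Schema schema = Schema.create(Schema.Type.STRING);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Passing null creates a fresh encoder; passing an existing one reuses it, as open() does above.
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<Utf8>(schema).write(value, encoder);
        encoder.flush();  // binaryEncoder() buffers, so flush before reading the bytes back
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        return new GenericDatumReader<Utf8>(schema).read(null, decoder);
      }
    }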

+ 3 - 3
hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java

@@ -34,9 +34,9 @@ import javax.net.SocketFactory;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.reflect.ReflectRequestor;
-import org.apache.avro.reflect.ReflectResponder;
-import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceStability;

+ 2 - 2
hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java

@@ -22,8 +22,8 @@ import java.io.IOException;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.specific.SpecificRequestor;
-import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
 import org.apache.hadoop.classification.InterfaceStability;
 
 /**
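Background for the import changes above (my reading, not stated in the patch): in the Avro 1.5 line the requestor/responder classes ship in the separate avro-ipc artifact under org.apache.avro.ipc.reflect and org.apache.avro.ipc.specific, which is why the new avro-ipc dependency appears in the pom files. A hedged sketch of how a specific requestor/responder pair is typically wired up with plain avro-ipc; note that Hadoop itself tunnels these over its own IPC (see AvroRpcEngine), and MailProtocol, MailImpl and the port below are placeholders, not names from this patch:

    // Placeholder protocol types and port; only the Avro package/class names are real.
    import java.net.InetSocketAddress;
    import org.apache.avro.ipc.NettyServer;
    import org.apache.avro.ipc.NettyTransceiver;
    import org.apache.avro.ipc.Server;
    import org.apache.avro.ipc.specific.SpecificRequestor;
    import org.apache.avro.ipc.specific.SpecificResponder;

    public class SpecificRpcSketch {
      public static void main(String[] args) throws Exception {
        Server server = new NettyServer(
            new SpecificResponder(MailProtocol.class, new MailImpl()),  // serves the generated interface
            new InetSocketAddress(65111));
        MailProtocol proxy = SpecificRequestor.getClient(
            MailProtocol.class,
            new NettyTransceiver(new InetSocketAddress("localhost", 65111)));
        // ... call proxy methods, then server.close()
      }
    }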

+ 0 - 0
hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroSpecificTestProtocol.avpr → hadoop-common/src/test/avro/AvroSpecificTestProtocol.avpr


+ 0 - 0
hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/avroRecord.avsc → hadoop-common/src/test/avro/avroRecord.avsc


+ 6 - 5
hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java

@@ -18,15 +18,16 @@
 
 package org.apache.hadoop.io;
 
-import java.io.IOException;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.lang.reflect.Type;
 
 import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.reflect.ReflectDatumReader;
-import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DecoderFactory;
 
 import static junit.framework.TestCase.assertEquals;
@@ -47,11 +48,11 @@ public class AvroTestUtil {
     // check that value is serialized correctly
     ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
-    writer.write(value, new BinaryEncoder(out));
+    writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
     ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
     Object after =
-      reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(
-          out.toByteArray(), null));
+      reader.read(null,
+                  DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
     assertEquals(value, after);
   }
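A note on the API chosen above (not in the patch): EncoderFactory.get().binaryEncoder(...) returns a buffered encoder, while directBinaryEncoder(...) writes straight through to the stream, so the test can call out.toByteArray() without an explicit flush. A small sketch of the difference, reusing the writer, value and out locals from the hunk above:

    // Sketch only; 'writer', 'value' and 'out' are the locals from the test above.
    BinaryEncoder direct = EncoderFactory.get().directBinaryEncoder(out, null);
    writer.write(value, direct);     // bytes reach 'out' immediately

    BinaryEncoder buffered = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(value, buffered);
    buffered.flush();                // without this, out.toByteArray() would miss data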
 

+ 1 - 1
hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 
 @SuppressWarnings("serial")
 public interface AvroTestProtocol {

+ 3 - 3
hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java

@@ -28,7 +28,7 @@ import javax.security.sasl.Sasl;
 import junit.framework.Assert;
 import junit.framework.TestCase;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -189,7 +189,7 @@ public class TestAvroRpc extends TestCase {
         (AvroSpecificTestProtocol)RPC.getProxy(AvroSpecificTestProtocol.class, 
             0, addr, conf);
       
-      Utf8 echo = proxy.echo(new Utf8("hello world"));
+      CharSequence echo = proxy.echo("hello world");
       assertEquals("hello world", echo.toString());
 
       int intResult = proxy.add(1, 2);
@@ -210,7 +210,7 @@ public class TestAvroRpc extends TestCase {
     }
 
     @Override
-    public Utf8 echo(Utf8 msg) throws AvroRemoteException {
+    public CharSequence echo(CharSequence msg) throws AvroRemoteException {
       return msg;
     }
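Background for the signature change (my reading, not stated in the patch): the Avro 1.5 specific compiler, now driven by the avro-maven-plugin configured above, maps the Avro string type to java.lang.CharSequence rather than Utf8 in generated code, so both the proxy call and the TestImpl override switch types. An assumed, simplified shape of the regenerated interface; the plugin's actual output also carries a PROTOCOL constant and may differ in detail:

    // Assumed shape of the code generated from AvroSpecificTestProtocol.avpr (simplified).
    public interface AvroSpecificTestProtocol {
      CharSequence echo(CharSequence msg) throws org.apache.avro.AvroRemoteException;
      int add(int arg1, int arg2) throws org.apache.avro.AvroRemoteException;
    }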
     

+ 13 - 9
hadoop-project/pom.xml

@@ -312,7 +312,7 @@
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-mapper-asl</artifactId>
-        <version>1.5.2</version>
+        <version>1.6.9</version>
       </dependency>
       <dependency>
         <groupId>org.aspectj</groupId>
@@ -325,15 +325,14 @@
         <version>1.8.5</version>
       </dependency>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.avro</groupId>
         <artifactId>avro</artifactId>
-        <version>1.3.2</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.apache.ant</groupId>
-            <artifactId>ant</artifactId>
-          </exclusion>
-        </exclusions>
+        <version>1.5.2</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-ipc</artifactId>
+        <version>1.5.2</version>
       </dependency>
       <dependency>
         <groupId>net.sf.kosmosfs</groupId>
@@ -446,6 +445,11 @@
           <artifactId>maven-deploy-plugin</artifactId>
           <version>2.5</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro-maven-plugin</artifactId>
+          <version>1.5.2</version>
+        </plugin>
       </plugins>
     </pluginManagement>